file_name large_stringlengths 4 140 | prefix large_stringlengths 0 39k | suffix large_stringlengths 0 36.1k | middle large_stringlengths 0 29.4k | fim_type large_stringclasses 4
values |
|---|---|---|---|---|
experiment.py | #!/usr/bin/env python
"""
Manage and display experimental results.
"""
__license__ = 'MIT License <http://www.opensource.org/licenses/mit-license.php>'
__author__ = 'Lucas Theis <lucas@theis.io>'
__docformat__ = 'epytext'
__version__ = '0.4.3'
import sys
import os
import numpy
import random
import scipy
import socket
sys.path.append('./code')
from argparse import ArgumentParser
from pickle import Unpickler, dump
from subprocess import Popen, PIPE
from os import path
from warnings import warn
from time import time, strftime, localtime
from numpy import ceil, argsort
from numpy.random import rand, randint
from distutils.version import StrictVersion
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from httplib import HTTPConnection
from getopt import getopt
class Experiment:
"""
@type time: float
@ivar time: time at initialization of experiment
@type duration: float
@ivar duration: time in seconds between initialization and saving
@type script: string
@ivar script: stores the content of the main Python script
@type platform: string
@ivar platform: information about operating system
@type processors: string
@ivar processors: some information about the processors
@type environ: string
@ivar environ: environment variables at point of initialization
@type hostname: string
@ivar hostname: hostname of server running the experiment
@type cwd: string
@ivar cwd: working directory at execution time
@type comment: string
@ivar comment: a comment describing the experiment
@type results: dictionary
@ivar results: container to store experimental results
@type commit: string
@ivar commit: git commit hash
@type modified: boolean
@ivar modified: indicates uncommited changes
@type filename: string
@ivar filename: path to stored results
@type seed: int
@ivar seed: random seed used through the experiment
@type versions: dictionary
@ivar versions: versions of Python, numpy and scipy
"""
def __str__(self):
"""
Summarize information about the experiment.
@rtype: string
@return: summary of the experiment
"""
strl = []
# date and duration of experiment
strl.append(strftime('date \t\t %a, %d %b %Y %H:%M:%S', localtime(self.time)))
strl.append('duration \t ' + str(int(self.duration)) + 's')
strl.append('hostname \t ' + self.hostname)
# commit hash
if self.commit:
if self.modified:
strl.append('commit \t\t ' + self.commit + ' (modified)')
else:
strl.append('commit \t\t ' + self.commit)
# results
strl.append('results \t {' + ', '.join(map(str, self.results.keys())) + '}')
# comment
if self.comment:
strl.append('\n' + self.comment)
return '\n'.join(strl)
def __del__(self):
self.status(None)
def __init__(self, filename='', comment='', seed=None, server=None, port=8000):
"""
If the filename is given and points to an existing experiment, load it.
Otherwise store the current timestamp and try to get commit information
from the repository in the current directory.
@type filename: string
@param filename: path to where the experiment will be stored
@type comment: string
@param comment: a comment describing the experiment
@type seed: integer
@param seed: random seed used in the experiment
"""
self.id = 0
self.time = time()
self.comment = comment
self.filename = filename
self.results = {}
self.seed = seed
self.script = ''
self.cwd = ''
self.platform = ''
self.processors = ''
self.environ = ''
self.duration = 0
self.versions = {}
self.server = ''
if self.seed is None:
self.seed = int((time() + 1e6 * rand()) * 1e3) % 4294967295
# set random seed
random.seed(self.seed)
numpy.random.seed(self.seed)
if self.filename:
# load given experiment
self.load()
else:
# identifies the experiment
self.id = randint(1E8)
# check if a comment was passed via the command line
parser = ArgumentParser(add_help=False)
parser.add_argument('--comment')
optlist, argv = parser.parse_known_args(sys.argv[1:])
optlist = vars(optlist)
# remove comment command line argument from argument list
sys.argv[1:] = argv
# comment given as command line argument
self.comment = optlist.get('comment', '')
# get OS information
self.platform = sys.platform
# arguments to the program
self.argv = sys.argv
self.script_path = sys.argv[0]
try:
with open(sys.argv[0]) as handle:
# store python script
self.script = handle.read()
except:
warn('Unable to read Python script.')
# environment variables
self.environ = os.environ
self.cwd = os.getcwd()
self.hostname = socket.gethostname()
# store some information about the processor(s)
if self.platform == 'linux2':
cmd = 'egrep "processor|model name|cpu MHz|cache size" /proc/cpuinfo'
with os.popen(cmd) as handle:
self.processors = handle.read()
elif self.platform == 'darwin':
cmd = 'system_profiler SPHardwareDataType | egrep "Processor|Cores|L2|Bus"'
with os.popen(cmd) as handle:
self.processors = handle.read()
# version information
self.versions['python'] = sys.version
self.versions['numpy'] = numpy.__version__
self.versions['scipy'] = scipy.__version__
# store information about git repository
if path.isdir('.git'):
# get commit hash
pr1 = Popen(['git', 'log', '-1'], stdout=PIPE)
pr2 = Popen(['head', '-1'], stdin=pr1.stdout, stdout=PIPE)
pr3 = Popen(['cut', '-d', ' ', '-f', '2'], stdin=pr2.stdout, stdout=PIPE)
self.commit = pr3.communicate()[0][:-1]
# check if project contains uncommitted changes
pr1 = Popen(['git', 'status', '--porcelain'], stdout=PIPE)
pr2 = Popen(['egrep', '^.M'], stdin=pr1.stdout, stdout=PIPE)
self.modified = pr2.communicate()[0]
if self.modified:
warn('Uncommitted changes.')
else:
# no git repository
self.commit = None
self.modified = False
# server managing experiments
self.server = server
self.port = port
self.status('running')
def status(self, status, **kwargs):
if self.server:
try:
conn = HTTPConnection(self.server, self.port)
conn.request('GET', '/version/')
resp = conn.getresponse()
if not resp.read().startswith('Experiment'):
raise RuntimeError()
HTTPConnection(self.server, self.port).request('POST', '', str(dict({
'id': self.id,
'version': __version__,
'status': status,
'hostname': self.hostname,
'cwd': self.cwd,
'script_path': self.script_path,
'script': self.script,
'comment': self.comment,
'time': self.time,
}, **kwargs)))
except:
warn('Unable to connect to \'{0}:{1}\'.'.format(self.server, self.port))
def progress(self, progress):
self.status('PROGRESS', progress=progress)
def save(self, filename=None, overwrite=False):
"""
Store results. If a filename is given, the default is overwritten.
@type filename: string
@param filename: path to where the experiment will be stored
@type overwrite: boolean
@param overwrite: overwrite existing files
"""
self.duration = time() - self.time
if filename is None:
filename = self.filename
else:
# replace {0} and {1} by date and time
tmp1 = strftime('%d%m%Y', localtime(time()))
tmp2 = strftime('%H%M%S', localtime(time()))
filename = filename.format(tmp1, tmp2)
self.filename = filename
# make sure directory exists
try:
os.makedirs(path.dirname(filename))
except OSError:
pass
# make sure filename is unique
counter = 0
pieces = path.splitext(filename)
if not overwrite:
while path.exists(filename):
counter += 1
filename = pieces[0] + '.' + str(counter) + pieces[1]
if counter:
warn(''.join(pieces) + ' already exists. Saving to ' + filename + '.')
# store experiment
with open(filename, 'wb') as handle:
dump({
'version': __version__,
'id': self.id,
'time': self.time,
'seed': self.seed,
'duration': self.duration,
'environ': self.environ,
'hostname': self.hostname,
'cwd': self.cwd,
'argv': self.argv,
'script': self.script,
'script_path': self.script_path,
'processors': self.processors,
'platform': self.platform,
'comment': self.comment,
'commit': self.commit,
'modified': self.modified,
'versions': self.versions,
'results': self.results}, handle, 1)
self.status('SAVE', filename=filename, duration=self.duration)
def load(self, filename=None):
"""
Loads experimental results from the specified file.
@type filename: string
@param filename: path to where the experiment is stored
"""
if filename:
self.filename = filename
with open(self.filename, 'rb') as handle:
res = load(handle)
self.time = res['time']
self.seed = res['seed']
self.duration = res['duration']
self.processors = res['processors']
self.environ = res['environ']
self.platform = res['platform']
self.comment = res['comment']
self.commit = res['commit']
self.modified = res['modified']
self.versions = res['versions']
self.results = res['results']
self.argv = res['argv'] \
if StrictVersion(res['version']) >= '0.3.1' else None
self.script = res['script'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.script_path = res['script_path'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.cwd = res['cwd'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.hostname = res['hostname'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.id = res['id'] \
if StrictVersion(res['version']) >= '0.4.0' else None
def __getitem__(self, key):
return self.results[key]
def __setitem__(self, key, value):
self.results[key] = value
def __delitem__(self, key):
del self.results[key]
class ExperimentRequestHandler(BaseHTTPRequestHandler):
"""
Renders HTML showing running and finished experiments.
"""
xpck_path = ''
running = {}
finished = {}
def do_GET(self):
"""
Renders HTML displaying running and saved experiments.
"""
# number of bars representing progress
max_bars = 20
if self.path == '/version/':
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write('Experiment {0}'.format(__version__))
elif self.path.startswith('/running/'):
id = int([s for s in self.path.split('/') if s != ''][-1])
# display running experiment
if id in ExperimentRequestHandler.running:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>Experiment</h2>')
instance = ExperimentRequestHandler.running[id]
num_bars = int(instance['progress']) * max_bars / 100
self.wfile.write('<table>')
self.wfile.write('<tr><th>Experiment:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['script_path'])))
self.wfile.write('<tr><th>Hostname:</th><td>{0}</td></tr>'.format(instance['hostname']))
self.wfile.write('<tr><th>Status:</th><td class="running">{0}</td></tr>'.format(instance['status']))
self.wfile.write('<tr><th>Progress:</th><td class="progress"><span class="bars">{0}</span>{1}</td></tr>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<tr><th>Start:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['time']))))
self.wfile.write('<tr><th>Comment:</th><td>{0}</td></tr>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</table>')
self.wfile.write('<h2>Script</h2>')
self.wfile.write('<pre>{0}</pre>'.format(instance['script']))
self.wfile.write(HTML_FOOTER)
elif id in ExperimentRequestHandler.finished:
self.send_response(302)
self.send_header('Location', '/finished/{0}/'.format(id))
self.end_headers()
else:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>404</h2>')
self.wfile.write('Requested experiment not found.')
self.wfile.write(HTML_FOOTER)
elif self.path.startswith('/finished/'):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
id = int([s for s in self.path.split('/') if s != ''][-1])
# display finished experiment
if id in ExperimentRequestHandler.finished:
instance = ExperimentRequestHandler.finished[id]
if id in ExperimentRequestHandler.running:
progress = ExperimentRequestHandler.running[id]['progress']
else:
progress = 100
num_bars = int(progress) * max_bars / 100
self.wfile.write('<h2>Experiment</h2>')
self.wfile.write('<table>')
self.wfile.write('<tr><th>Experiment:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['script_path'])))
self.wfile.write('<tr><th>Results:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['filename'])))
self.wfile.write('<tr><th>Status:</th><td class="finished">{0}</td></tr>'.format(instance['status']))
self.wfile.write('<tr><th>Progress:</th><td class="progress"><span class="bars">{0}</span>{1}</td></tr>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<tr><th>Start:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['time']))))
self.wfile.write('<tr><th>End:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['duration']))))
self.wfile.write('<tr><th>Comment:</th><td>{0}</td></tr>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</table>')
self.wfile.write('<h2>Results</h2>')
try:
experiment = Experiment(os.path.join(instance['cwd'], instance['filename']))
except:
self.wfile.write('Could not open file.')
else:
self.wfile.write('<table>')
for key, value in experiment.results.items():
self.wfile.write('<tr><th>{0}</th><td>{1}</td></tr>'.format(key, value))
self.wfile.write('</table>')
self.wfile.write('<h2>Script</h2>')
self.wfile.write('<pre>{0}</pre>'.format(instance['script']))
else:
self.wfile.write('<h2>404</h2>')
self.wfile.write('Requested experiment not found.')
self.wfile.write(HTML_FOOTER)
else:
files = []
if 'xpck_path' in ExperimentRequestHandler.__dict__:
if ExperimentRequestHandler.xpck_path != '':
for path in ExperimentRequestHandler.xpck_path.split(':'):
files += [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xpck')]
if 'XPCK_PATH' in os.environ:
for path in os.environ['XPCK_PATH'].split(':'):
files += [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xpck')]
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>Running</h2>')
# display running experiments
if ExperimentRequestHandler.running:
|
else:
self.wfile.write('No running experiments.')
self.wfile.write('<h2>Saved</h2>')
# display saved experiments
if ExperimentRequestHandler.finished:
self.wfile.write('<table>')
self.wfile.write('<tr>')
self.wfile.write('<th>Results</th>')
self.wfile.write('<th>Status</th>')
self.wfile.write('<th>Progress</th>')
self.wfile.write('<th>Start</th>')
self.wfile.write('<th>End</th>')
self.wfile.write('<th>Comment</th>')
self.wfile.write('</tr>')
# sort ids by start time of experiment
times = [instance['time'] + instance['duration']
for instance in ExperimentRequestHandler.finished.values()]
ids = ExperimentRequestHandler.finished.keys()
ids = [ids[i] for i in argsort(times)][::-1]
for id in ids:
instance = ExperimentRequestHandler.finished[id]
if id in ExperimentRequestHandler.running:
progress = ExperimentRequestHandler.running[id]['progress']
else:
progress = 100
num_bars = int(progress) * max_bars / 100
self.wfile.write('<tr>')
self.wfile.write('<td class="filepath"><a href="/finished/{1}/">{0}</a></td>'.format(
instance['filename'], instance['id']))
self.wfile.write('<td class="finished">saved</td>')
self.wfile.write('<td class="progress"><span class="bars">{0}</span>{1}</td>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time']))))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time'] + instance['duration']))))
self.wfile.write('<td class="comment">{0}</td>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</tr>')
self.wfile.write('</table>')
else:
self.wfile.write('No saved experiments.')
self.wfile.write(HTML_FOOTER)
def do_POST(self):
instances = ExperimentRequestHandler.running
instance = eval(self.rfile.read(int(self.headers['Content-Length'])))
if instance['status'] is 'PROGRESS':
if instance['id'] not in instances:
instances[instance['id']] = instance
instances[instance['id']]['status'] = 'running'
instances[instance['id']]['progress'] = instance['progress']
elif instance['status'] is 'SAVE':
ExperimentRequestHandler.finished[instance['id']] = instance
ExperimentRequestHandler.finished[instance['id']]['status'] = 'saved'
else:
if instance['id'] in instances:
progress = instances[instance['id']]['progress']
else:
progress = 0
instances[instance['id']] = instance
instances[instance['id']]['progress'] = progress
if instance['status'] is None:
try:
del instances[instance['id']]
except:
pass
class XUnpickler(Unpickler):
"""
An extension of the Unpickler class which resolves some backwards
compatibility issues of Numpy.
"""
def find_class(self, module, name):
"""
Helps Unpickler to find certain Numpy modules.
"""
try:
numpy_version = StrictVersion(numpy.__version__)
if numpy_version >= '1.5.0':
if module == 'numpy.core.defmatrix':
module = 'numpy.matrixlib.defmatrix'
except ValueError:
pass
return Unpickler.find_class(self, module, name)
def load(file):
return XUnpickler(file).load()
def main(argv):
"""
Load and display experiment information.
"""
if len(argv) < 2:
print 'Usage:', argv[0], '[--server] [--port=<port>] [--path=<path>] [filename]'
return 0
optlist, argv = getopt(argv[1:], '', ['server', 'port=', 'path='])
optlist = dict(optlist)
if '--server' in optlist:
try:
ExperimentRequestHandler.xpck_path = optlist.get('--path', '')
port = optlist.get('--port', 8000)
# start server
server = HTTPServer(('', port), ExperimentRequestHandler)
server.serve_forever()
except KeyboardInterrupt:
server.socket.close()
return 0
# load experiment
experiment = Experiment(sys.argv[1])
if len(argv) > 1:
# print arguments
for arg in argv[1:]:
try:
print experiment[arg]
except:
print experiment[int(arg)]
return 0
# print summary of experiment
print experiment
return 0
HTML_HEADER = '''<html>
<head>
<title>Experiments</title>
<style type="text/css">
body {
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 11pt;
color: black;
background: white;
padding: 0pt 20pt;
}
h2 {
margin-top: 20pt;
font-size: 16pt;
}
table {
border-collapse: collapse;
}
tr:nth-child(even) {
background: #f4f4f4;
}
th {
font-size: 12pt;
text-align: left;
padding: 2pt 10pt 3pt 0pt;
}
td {
font-size: 10pt;
padding: 3pt 10pt 2pt 0pt;
}
pre {
font-size: 10pt;
background: #f4f4f4;
padding: 5pt;
}
a {
text-decoration: none;
color: #04a;
}
.running {
color: #08b;
}
.finished {
color: #390;
}
.comment {
min-width: 200pt;
font-style: italic;
}
.progress {
color: #ccc;
}
.progress .bars {
color: black;
}
</style>
</head>
<body>'''
HTML_FOOTER = '''
</body>
</html>'''
if __name__ == '__main__':
sys.exit(main(sys.argv))
| self.wfile.write('<table>')
self.wfile.write('<tr>')
self.wfile.write('<th>Experiment</th>')
self.wfile.write('<th>Hostname</th>')
self.wfile.write('<th>Status</th>')
self.wfile.write('<th>Progress</th>')
self.wfile.write('<th>Start</th>')
self.wfile.write('<th>Comment</th>')
self.wfile.write('</tr>')
# sort ids by start time of experiment
times = [instance['time'] for instance in ExperimentRequestHandler.running.values()]
ids = ExperimentRequestHandler.running.keys()
ids = [ids[i] for i in argsort(times)][::-1]
for id in ids:
instance = ExperimentRequestHandler.running[id]
num_bars = int(instance['progress']) * max_bars / 100
self.wfile.write('<tr>')
self.wfile.write('<td class="filepath"><a href="/running/{1}/">{0}</a></td>'.format(
instance['script_path'], instance['id']))
self.wfile.write('<td>{0}</td>'.format(instance['hostname']))
self.wfile.write('<td class="running">{0}</td>'.format(instance['status']))
self.wfile.write('<td class="progress"><span class="bars">{0}</span>{1}</td>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time']))))
self.wfile.write('<td class="comment">{0}</td>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</tr>')
self.wfile.write('</table>') | conditional_block |
experiment.py | #!/usr/bin/env python
"""
Manage and display experimental results.
"""
__license__ = 'MIT License <http://www.opensource.org/licenses/mit-license.php>'
__author__ = 'Lucas Theis <lucas@theis.io>'
__docformat__ = 'epytext'
__version__ = '0.4.3'
import sys
import os
import numpy
import random
import scipy
import socket
sys.path.append('./code')
from argparse import ArgumentParser
from pickle import Unpickler, dump
from subprocess import Popen, PIPE
from os import path
from warnings import warn
from time import time, strftime, localtime
from numpy import ceil, argsort
from numpy.random import rand, randint
from distutils.version import StrictVersion
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from httplib import HTTPConnection
from getopt import getopt
class Experiment:
"""
@type time: float
@ivar time: time at initialization of experiment
@type duration: float
@ivar duration: time in seconds between initialization and saving
@type script: string
@ivar script: stores the content of the main Python script
@type platform: string
@ivar platform: information about operating system
@type processors: string
@ivar processors: some information about the processors
@type environ: string
@ivar environ: environment variables at point of initialization
@type hostname: string
@ivar hostname: hostname of server running the experiment
@type cwd: string
@ivar cwd: working directory at execution time
@type comment: string
@ivar comment: a comment describing the experiment
@type results: dictionary
@ivar results: container to store experimental results
@type commit: string
@ivar commit: git commit hash
@type modified: boolean
@ivar modified: indicates uncommited changes
@type filename: string
@ivar filename: path to stored results
@type seed: int
@ivar seed: random seed used through the experiment
@type versions: dictionary
@ivar versions: versions of Python, numpy and scipy
"""
def __str__(self):
"""
Summarize information about the experiment.
@rtype: string
@return: summary of the experiment
"""
strl = []
# date and duration of experiment
strl.append(strftime('date \t\t %a, %d %b %Y %H:%M:%S', localtime(self.time)))
strl.append('duration \t ' + str(int(self.duration)) + 's')
strl.append('hostname \t ' + self.hostname)
# commit hash
if self.commit:
if self.modified:
strl.append('commit \t\t ' + self.commit + ' (modified)')
else:
strl.append('commit \t\t ' + self.commit)
# results
strl.append('results \t {' + ', '.join(map(str, self.results.keys())) + '}')
# comment
if self.comment:
strl.append('\n' + self.comment)
return '\n'.join(strl)
def __del__(self):
self.status(None)
def __init__(self, filename='', comment='', seed=None, server=None, port=8000):
"""
If the filename is given and points to an existing experiment, load it.
Otherwise store the current timestamp and try to get commit information
from the repository in the current directory.
@type filename: string
@param filename: path to where the experiment will be stored
@type comment: string
@param comment: a comment describing the experiment
@type seed: integer
@param seed: random seed used in the experiment
"""
self.id = 0
self.time = time()
self.comment = comment
self.filename = filename
self.results = {}
self.seed = seed
self.script = ''
self.cwd = ''
self.platform = ''
self.processors = ''
self.environ = ''
self.duration = 0
self.versions = {}
self.server = ''
if self.seed is None:
self.seed = int((time() + 1e6 * rand()) * 1e3) % 4294967295
# set random seed
random.seed(self.seed)
numpy.random.seed(self.seed)
if self.filename:
# load given experiment
self.load()
else:
# identifies the experiment
self.id = randint(1E8)
# check if a comment was passed via the command line
parser = ArgumentParser(add_help=False)
parser.add_argument('--comment')
optlist, argv = parser.parse_known_args(sys.argv[1:])
optlist = vars(optlist)
# remove comment command line argument from argument list
sys.argv[1:] = argv
# comment given as command line argument
self.comment = optlist.get('comment', '')
# get OS information
self.platform = sys.platform
# arguments to the program
self.argv = sys.argv
self.script_path = sys.argv[0]
try:
with open(sys.argv[0]) as handle:
# store python script
self.script = handle.read()
except:
warn('Unable to read Python script.')
# environment variables
self.environ = os.environ
self.cwd = os.getcwd()
self.hostname = socket.gethostname()
# store some information about the processor(s)
if self.platform == 'linux2':
cmd = 'egrep "processor|model name|cpu MHz|cache size" /proc/cpuinfo'
with os.popen(cmd) as handle:
self.processors = handle.read()
elif self.platform == 'darwin':
cmd = 'system_profiler SPHardwareDataType | egrep "Processor|Cores|L2|Bus"'
with os.popen(cmd) as handle:
self.processors = handle.read()
# version information
self.versions['python'] = sys.version
self.versions['numpy'] = numpy.__version__
self.versions['scipy'] = scipy.__version__
# store information about git repository
if path.isdir('.git'):
# get commit hash
pr1 = Popen(['git', 'log', '-1'], stdout=PIPE)
pr2 = Popen(['head', '-1'], stdin=pr1.stdout, stdout=PIPE)
pr3 = Popen(['cut', '-d', ' ', '-f', '2'], stdin=pr2.stdout, stdout=PIPE)
self.commit = pr3.communicate()[0][:-1]
# check if project contains uncommitted changes
pr1 = Popen(['git', 'status', '--porcelain'], stdout=PIPE)
pr2 = Popen(['egrep', '^.M'], stdin=pr1.stdout, stdout=PIPE)
self.modified = pr2.communicate()[0]
if self.modified:
warn('Uncommitted changes.')
else:
# no git repository
self.commit = None
self.modified = False
# server managing experiments
self.server = server
self.port = port
self.status('running')
def status(self, status, **kwargs):
if self.server:
try:
conn = HTTPConnection(self.server, self.port)
conn.request('GET', '/version/')
resp = conn.getresponse()
if not resp.read().startswith('Experiment'):
raise RuntimeError()
HTTPConnection(self.server, self.port).request('POST', '', str(dict({
'id': self.id,
'version': __version__,
'status': status,
'hostname': self.hostname,
'cwd': self.cwd,
'script_path': self.script_path,
'script': self.script,
'comment': self.comment,
'time': self.time,
}, **kwargs)))
except:
warn('Unable to connect to \'{0}:{1}\'.'.format(self.server, self.port))
def progress(self, progress):
self.status('PROGRESS', progress=progress)
def save(self, filename=None, overwrite=False):
"""
Store results. If a filename is given, the default is overwritten.
@type filename: string
@param filename: path to where the experiment will be stored
@type overwrite: boolean
@param overwrite: overwrite existing files
"""
self.duration = time() - self.time
if filename is None:
filename = self.filename
else:
# replace {0} and {1} by date and time
tmp1 = strftime('%d%m%Y', localtime(time()))
tmp2 = strftime('%H%M%S', localtime(time()))
filename = filename.format(tmp1, tmp2)
self.filename = filename
# make sure directory exists
try:
os.makedirs(path.dirname(filename))
except OSError:
pass
# make sure filename is unique
counter = 0
pieces = path.splitext(filename)
if not overwrite:
while path.exists(filename):
counter += 1
filename = pieces[0] + '.' + str(counter) + pieces[1]
if counter:
warn(''.join(pieces) + ' already exists. Saving to ' + filename + '.')
# store experiment
with open(filename, 'wb') as handle:
dump({
'version': __version__,
'id': self.id,
'time': self.time,
'seed': self.seed,
'duration': self.duration,
'environ': self.environ,
'hostname': self.hostname,
'cwd': self.cwd,
'argv': self.argv,
'script': self.script,
'script_path': self.script_path,
'processors': self.processors,
'platform': self.platform,
'comment': self.comment,
'commit': self.commit,
'modified': self.modified,
'versions': self.versions,
'results': self.results}, handle, 1)
self.status('SAVE', filename=filename, duration=self.duration)
def load(self, filename=None):
"""
Loads experimental results from the specified file.
@type filename: string
@param filename: path to where the experiment is stored
"""
if filename:
self.filename = filename
with open(self.filename, 'rb') as handle:
res = load(handle)
self.time = res['time']
self.seed = res['seed']
self.duration = res['duration']
self.processors = res['processors']
self.environ = res['environ']
self.platform = res['platform']
self.comment = res['comment']
self.commit = res['commit']
self.modified = res['modified']
self.versions = res['versions']
self.results = res['results']
self.argv = res['argv'] \
if StrictVersion(res['version']) >= '0.3.1' else None
self.script = res['script'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.script_path = res['script_path'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.cwd = res['cwd'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.hostname = res['hostname'] \
if StrictVersion(res['version']) >= '0.4.0' else None
self.id = res['id'] \
if StrictVersion(res['version']) >= '0.4.0' else None
def __getitem__(self, key):
return self.results[key]
def __setitem__(self, key, value):
self.results[key] = value
def __delitem__(self, key):
del self.results[key]
class ExperimentRequestHandler(BaseHTTPRequestHandler):
"""
Renders HTML showing running and finished experiments.
"""
xpck_path = ''
running = {}
finished = {}
def do_GET(self):
"""
Renders HTML displaying running and saved experiments.
"""
# number of bars representing progress
max_bars = 20
if self.path == '/version/':
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write('Experiment {0}'.format(__version__))
elif self.path.startswith('/running/'):
id = int([s for s in self.path.split('/') if s != ''][-1])
# display running experiment
if id in ExperimentRequestHandler.running:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>Experiment</h2>')
instance = ExperimentRequestHandler.running[id]
num_bars = int(instance['progress']) * max_bars / 100
self.wfile.write('<table>')
self.wfile.write('<tr><th>Experiment:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['script_path'])))
self.wfile.write('<tr><th>Hostname:</th><td>{0}</td></tr>'.format(instance['hostname']))
self.wfile.write('<tr><th>Status:</th><td class="running">{0}</td></tr>'.format(instance['status']))
self.wfile.write('<tr><th>Progress:</th><td class="progress"><span class="bars">{0}</span>{1}</td></tr>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<tr><th>Start:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['time']))))
self.wfile.write('<tr><th>Comment:</th><td>{0}</td></tr>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</table>')
self.wfile.write('<h2>Script</h2>')
self.wfile.write('<pre>{0}</pre>'.format(instance['script']))
self.wfile.write(HTML_FOOTER)
elif id in ExperimentRequestHandler.finished:
self.send_response(302)
self.send_header('Location', '/finished/{0}/'.format(id))
self.end_headers()
else:
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>404</h2>')
self.wfile.write('Requested experiment not found.')
self.wfile.write(HTML_FOOTER)
elif self.path.startswith('/finished/'):
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
id = int([s for s in self.path.split('/') if s != ''][-1])
# display finished experiment
if id in ExperimentRequestHandler.finished:
instance = ExperimentRequestHandler.finished[id]
if id in ExperimentRequestHandler.running:
progress = ExperimentRequestHandler.running[id]['progress']
else:
progress = 100
num_bars = int(progress) * max_bars / 100
self.wfile.write('<h2>Experiment</h2>')
self.wfile.write('<table>')
self.wfile.write('<tr><th>Experiment:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['script_path'])))
self.wfile.write('<tr><th>Results:</th><td>{0}</td></tr>'.format(
os.path.join(instance['cwd'], instance['filename'])))
self.wfile.write('<tr><th>Status:</th><td class="finished">{0}</td></tr>'.format(instance['status']))
self.wfile.write('<tr><th>Progress:</th><td class="progress"><span class="bars">{0}</span>{1}</td></tr>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<tr><th>Start:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['time']))))
self.wfile.write('<tr><th>End:</th><td>{0}</td></tr>'.format(
strftime('%a, %d %b %Y %H:%M:%S', localtime(instance['duration']))))
self.wfile.write('<tr><th>Comment:</th><td>{0}</td></tr>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</table>')
self.wfile.write('<h2>Results</h2>')
try:
experiment = Experiment(os.path.join(instance['cwd'], instance['filename']))
except:
self.wfile.write('Could not open file.')
else:
self.wfile.write('<table>')
for key, value in experiment.results.items():
self.wfile.write('<tr><th>{0}</th><td>{1}</td></tr>'.format(key, value))
self.wfile.write('</table>')
self.wfile.write('<h2>Script</h2>')
self.wfile.write('<pre>{0}</pre>'.format(instance['script']))
else:
self.wfile.write('<h2>404</h2>')
self.wfile.write('Requested experiment not found.')
self.wfile.write(HTML_FOOTER)
else:
files = []
if 'xpck_path' in ExperimentRequestHandler.__dict__:
if ExperimentRequestHandler.xpck_path != '':
for path in ExperimentRequestHandler.xpck_path.split(':'):
files += [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xpck')]
if 'XPCK_PATH' in os.environ:
for path in os.environ['XPCK_PATH'].split(':'):
files += [os.path.join(path, f) for f in os.listdir(path) if f.endswith('.xpck')]
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(HTML_HEADER)
self.wfile.write('<h2>Running</h2>')
# display running experiments
if ExperimentRequestHandler.running:
self.wfile.write('<table>')
self.wfile.write('<tr>')
self.wfile.write('<th>Experiment</th>')
self.wfile.write('<th>Hostname</th>')
self.wfile.write('<th>Status</th>')
self.wfile.write('<th>Progress</th>')
self.wfile.write('<th>Start</th>')
self.wfile.write('<th>Comment</th>')
self.wfile.write('</tr>')
# sort ids by start time of experiment
times = [instance['time'] for instance in ExperimentRequestHandler.running.values()] | instance = ExperimentRequestHandler.running[id]
num_bars = int(instance['progress']) * max_bars / 100
self.wfile.write('<tr>')
self.wfile.write('<td class="filepath"><a href="/running/{1}/">{0}</a></td>'.format(
instance['script_path'], instance['id']))
self.wfile.write('<td>{0}</td>'.format(instance['hostname']))
self.wfile.write('<td class="running">{0}</td>'.format(instance['status']))
self.wfile.write('<td class="progress"><span class="bars">{0}</span>{1}</td>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time']))))
self.wfile.write('<td class="comment">{0}</td>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</tr>')
self.wfile.write('</table>')
else:
self.wfile.write('No running experiments.')
self.wfile.write('<h2>Saved</h2>')
# display saved experiments
if ExperimentRequestHandler.finished:
self.wfile.write('<table>')
self.wfile.write('<tr>')
self.wfile.write('<th>Results</th>')
self.wfile.write('<th>Status</th>')
self.wfile.write('<th>Progress</th>')
self.wfile.write('<th>Start</th>')
self.wfile.write('<th>End</th>')
self.wfile.write('<th>Comment</th>')
self.wfile.write('</tr>')
# sort ids by start time of experiment
times = [instance['time'] + instance['duration']
for instance in ExperimentRequestHandler.finished.values()]
ids = ExperimentRequestHandler.finished.keys()
ids = [ids[i] for i in argsort(times)][::-1]
for id in ids:
instance = ExperimentRequestHandler.finished[id]
if id in ExperimentRequestHandler.running:
progress = ExperimentRequestHandler.running[id]['progress']
else:
progress = 100
num_bars = int(progress) * max_bars / 100
self.wfile.write('<tr>')
self.wfile.write('<td class="filepath"><a href="/finished/{1}/">{0}</a></td>'.format(
instance['filename'], instance['id']))
self.wfile.write('<td class="finished">saved</td>')
self.wfile.write('<td class="progress"><span class="bars">{0}</span>{1}</td>'.format(
'|' * num_bars, '|' * (max_bars - num_bars)))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time']))))
self.wfile.write('<td>{0}</td>'.format(strftime('%a, %d %b %Y %H:%M:%S',
localtime(instance['time'] + instance['duration']))))
self.wfile.write('<td class="comment">{0}</td>'.format(
instance['comment'] if instance['comment'] else '-'))
self.wfile.write('</tr>')
self.wfile.write('</table>')
else:
self.wfile.write('No saved experiments.')
self.wfile.write(HTML_FOOTER)
def do_POST(self):
instances = ExperimentRequestHandler.running
instance = eval(self.rfile.read(int(self.headers['Content-Length'])))
if instance['status'] is 'PROGRESS':
if instance['id'] not in instances:
instances[instance['id']] = instance
instances[instance['id']]['status'] = 'running'
instances[instance['id']]['progress'] = instance['progress']
elif instance['status'] is 'SAVE':
ExperimentRequestHandler.finished[instance['id']] = instance
ExperimentRequestHandler.finished[instance['id']]['status'] = 'saved'
else:
if instance['id'] in instances:
progress = instances[instance['id']]['progress']
else:
progress = 0
instances[instance['id']] = instance
instances[instance['id']]['progress'] = progress
if instance['status'] is None:
try:
del instances[instance['id']]
except:
pass
class XUnpickler(Unpickler):
"""
An extension of the Unpickler class which resolves some backwards
compatibility issues of Numpy.
"""
def find_class(self, module, name):
"""
Helps Unpickler to find certain Numpy modules.
"""
try:
numpy_version = StrictVersion(numpy.__version__)
if numpy_version >= '1.5.0':
if module == 'numpy.core.defmatrix':
module = 'numpy.matrixlib.defmatrix'
except ValueError:
pass
return Unpickler.find_class(self, module, name)
def load(file):
return XUnpickler(file).load()
def main(argv):
"""
Load and display experiment information.
"""
if len(argv) < 2:
print 'Usage:', argv[0], '[--server] [--port=<port>] [--path=<path>] [filename]'
return 0
optlist, argv = getopt(argv[1:], '', ['server', 'port=', 'path='])
optlist = dict(optlist)
if '--server' in optlist:
try:
ExperimentRequestHandler.xpck_path = optlist.get('--path', '')
port = optlist.get('--port', 8000)
# start server
server = HTTPServer(('', port), ExperimentRequestHandler)
server.serve_forever()
except KeyboardInterrupt:
server.socket.close()
return 0
# load experiment
experiment = Experiment(sys.argv[1])
if len(argv) > 1:
# print arguments
for arg in argv[1:]:
try:
print experiment[arg]
except:
print experiment[int(arg)]
return 0
# print summary of experiment
print experiment
return 0
HTML_HEADER = '''<html>
<head>
<title>Experiments</title>
<style type="text/css">
body {
font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
font-size: 11pt;
color: black;
background: white;
padding: 0pt 20pt;
}
h2 {
margin-top: 20pt;
font-size: 16pt;
}
table {
border-collapse: collapse;
}
tr:nth-child(even) {
background: #f4f4f4;
}
th {
font-size: 12pt;
text-align: left;
padding: 2pt 10pt 3pt 0pt;
}
td {
font-size: 10pt;
padding: 3pt 10pt 2pt 0pt;
}
pre {
font-size: 10pt;
background: #f4f4f4;
padding: 5pt;
}
a {
text-decoration: none;
color: #04a;
}
.running {
color: #08b;
}
.finished {
color: #390;
}
.comment {
min-width: 200pt;
font-style: italic;
}
.progress {
color: #ccc;
}
.progress .bars {
color: black;
}
</style>
</head>
<body>'''
HTML_FOOTER = '''
</body>
</html>'''
if __name__ == '__main__':
sys.exit(main(sys.argv)) | ids = ExperimentRequestHandler.running.keys()
ids = [ids[i] for i in argsort(times)][::-1]
for id in ids: | random_line_split |
general.py | import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
settings = {"POLL_DURATION" : 60}
class RPS(Enum):
rock = "\N{MOYAI}"
paper = "\N{PAGE FACING UP}"
scissors = "\N{BLACK SCISSORS}"
class RPSParser:
def __init__(self, argument):
argument = argument.lower()
if argument == "rock":
self.choice = RPS.rock
elif argument == "paper":
self.choice = RPS.paper
elif argument == "scissors":
self.choice = RPS.scissors
else:
raise
class General:
"""General commands."""
def __init__(self, bot):
self.bot = bot
self.stopwatches = {}
self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good",
"Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again",
"Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again",
"Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
self.poll_sessions = []
@commands.command(hidden=True)
async def ping(self):
"""Pong."""
await self.bot.say("Pong.")
@commands.command()
async def choose(self, *choices):
"""Chooses between multiple choices.
To denote multiple choices, you should use double quotes.
"""
choices = [escape_mass_mentions(c) for c in choices]
if len(choices) < 2:
await self.bot.say('Not enough choices to pick from.')
else:
await self.bot.say(choice(choices))
@commands.command(pass_context=True)
async def roll(self, ctx, number : int = 100):
"""Rolls random number (between 1 and user choice)
Defaults to 100.
"""
author = ctx.message.author
if number > 1:
n = randint(1, number)
await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
else:
await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
"""Flips a coin... or a user.
Defaults to coin.
"""
if user != None:
msg = ""
if user.id == self.bot.user.id:
user = ctx.message.author
msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
char = "abcdefghijklmnopqrstuvwxyz"
tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
table = str.maketrans(char, tran)
name = user.display_name.translate(table)
char = char.upper()
tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
table = str.maketrans(char, tran)
name = name.translate(table)
await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
else:
await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
@commands.command(pass_context=True)
async def rps(self, ctx, your_choice : RPSParser):
"""Play rock paper scissors"""
author = ctx.message.author
player_choice = your_choice.choice
red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
cond = {
(RPS.rock, RPS.paper) : False,
(RPS.rock, RPS.scissors) : True,
(RPS.paper, RPS.rock) : True,
(RPS.paper, RPS.scissors) : False,
(RPS.scissors, RPS.rock) : False,
(RPS.scissors, RPS.paper) : True
}
if red_choice == player_choice:
outcome = None # Tie
else:
outcome = cond[(player_choice, red_choice)]
if outcome is True:
await self.bot.say("{} You win {}!"
"".format(red_choice.value, author.mention))
elif outcome is False:
await self.bot.say("{} You lose {}!"
"".format(red_choice.value, author.mention))
else:
await self.bot.say("{} We're square {}!"
"".format(red_choice.value, author.mention))
@commands.command(name="8", aliases=["8ball"])
async def _8ball(self, *, question : str):
"""Ask 8 ball a question
Question must end with a question mark.
"""
if question.endswith("?") and question != "?":
await self.bot.say("`" + choice(self.ball) + "`")
else:
await self.bot.say("That doesn't look like a question.")
@commands.command(aliases=["sw"], pass_context=True)
async def stopwatch(self, ctx):
"""Starts/stops stopwatch"""
author = ctx.message.author
if not author.id in self.stopwatches:
self.stopwatches[author.id] = int(time.perf_counter())
await self.bot.say(author.mention + " Stopwatch started!")
else:
tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
tmp = str(datetime.timedelta(seconds=tmp))
await self.bot.say(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
self.stopwatches.pop(author.id, None)
@commands.command()
async def lmgtfy(self, *, search_terms : str):
"""Creates a lmgtfy link"""
search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms))
@commands.command(no_pm=True, hidden=True)
async def hug(self, user : discord.Member, intensity : int=1):
"""Because everyone likes hugs
Up to 10 intensity levels."""
name = italics(user.display_name)
if intensity <= 0:
msg = "(っ˘̩╭╮˘̩)っ" + name
elif intensity <= 3:
msg = "(っ´▽`)っ" + name
elif intensity <= 6:
msg = "╰(*´︶`*)╯" + name
elif intensity <= 9:
msg = "(つ≧▽≦)つ" + name
elif intensity >= 10:
msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name)
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
async def userinfo(self, ctx, *, user: discord.Member=None):
"""Shows users's informations"""
author = ctx.message.author
server = ctx.message.server
if not user:
user = author
roles = [x.name for x in user.roles if x.name != "@everyone"]
joined_at = self.fetch_joined_at(user, server)
since_created = (ctx.message.timestamp - user.created_at).days
since_joined = (ctx.message.timestamp - joined_at).days
user_joined = joined_at.strftime("%d %b %Y %H:%M")
user_created = user.created_at.strftime("%d %b %Y %H:%M")
member_number = sorted(server.members,
key=lambda m: m.joined_at).index(user) + 1
created_on = "{}\n({} days ago)".format(user_created, since_created)
joined_on = "{}\n({} days ago)".format(user_joined, since_joined)
game = "Chilling in {} status".format(user.status)
if user.game is None:
pass
elif user.game.url is None:
game = "Playing {}".format(user.game)
else:
game = "Streaming: [{}]({})".format(user.game, user.game.url)
if roles:
roles = sorted(roles, key=[x.name for x in server.role_hierarchy
if x.name != "@everyone"].index)
roles = ", ".join(roles)
else:
roles = "None"
data = discord.Embed(description=game, colour=user.colour)
data.add_field(name="Joined Discord on", value=created_on)
data.add_field(name="Joined this server on", value=joined_on)
data.add_field(name="Roles", value=roles, inline=False)
data.set_footer(text="Member #{} | User ID:{}"
"".format(member_number, user.id))
name = str(user)
name = " ~ ".join((name, user.nick)) if user.nick else name
if user.avatar_url:
data.set_author(name=name, url=user.avatar_url)
data.set_thumbnail(url=user.avatar_url)
else:
data.set_author(name=name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command(pass_context=True, no_pm=True)
async def serverinfo(self, ctx):
"""Shows server's informations"""
server = ctx.message.server
online = len([m.status for m in server.members
if m.status == discord.Status.online or
m.status == discord.Status.idle])
total_users = len(server.members)
text_channels = len([x for x in server.channels
if x.type == discord.ChannelType.text])
voice_channels = len(server.channels) - text_channels
passed = (ctx.message.timestamp - server.created_at).days
created_at = ("Since {}. That's over {} days ago!"
"".format(server.created_at.strftime("%d %b %Y %H:%M"),
passed))
colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])
colour = int(colour, 16)
data = discord.Embed(
description=created_at,
colour=discord.Colour(value=colour))
data.add_field(name="Region", value=str(server.region))
data.add_field(name="Users", value="{}/{}".format(online, total_users))
data.add_field(name="Text Channels", value=text_channels)
data.add_field(name="Voice Channels", value=voice_channels)
data.add_field(name="Roles", value=len(server.roles))
data.add_field(name="Owner", value=str(server.owner))
data.set_footer(text="Server ID: " + server.id)
if server.icon_url:
data.set_author(name=server.name, url=server.icon_url)
data.set_thumbnail(url=server.icon_url)
else:
data.set_author(name=server.name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command()
async def urban(self, *, search_terms : str, definition_number : int=1):
"""Urban Dictionary search
Definition number must be between 1 and 10"""
def encode(s):
return quote_plus(s, encoding='utf-8', errors='replace')
# definition_number is just there to show up in the help
# all this mess is to avoid forcing double quotes on the user
| try:
if len(search_terms) > 1:
pos = int(search_terms[-1]) - 1
search_terms = search_terms[:-1]
else:
pos = 0
if pos not in range(0, 11): # API only provides the
pos = 0 # top 10 definitions
except ValueError:
pos = 0
search_terms = "+".join([encode(s) for s in search_terms])
url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
try:
async with aiohttp.get(url) as r:
result = await r.json()
if result["list"]:
definition = result['list'][pos]['definition']
example = result['list'][pos]['example']
defs = len(result['list'])
msg = ("**Definition #{} out of {}:\n**{}\n\n"
"**Example:\n**{}".format(pos+1, defs, definition,
example))
msg = pagify(msg, ["\n"])
for page in msg:
await self.bot.say(page)
else:
await self.bot.say("Your search terms gave no results.")
except IndexError:
await self.bot.say("There is no definition #{}".format(pos+1))
except:
await self.bot.say("Error.")
@commands.command(pass_context=True, no_pm=True)
async def poll(self, ctx, *text):
"""Starts/stops a poll
Usage example:
poll Is this a poll?;Yes;No;Maybe
poll stop"""
message = ctx.message
if len(text) == 1:
if text[0].lower() == "stop":
await self.endpoll(message)
return
if not self.getPollByChannel(message):
check = " ".join(text).lower()
if "@everyone" in check or "@here" in check:
await self.bot.say("Nice try.")
return
p = NewPoll(message, " ".join(text), self)
if p.valid:
self.poll_sessions.append(p)
await p.start()
else:
await self.bot.say("poll question;option1;option2 (...)")
else:
await self.bot.say("A poll is already ongoing in this channel.")
async def endpoll(self, message):
if self.getPollByChannel(message):
p = self.getPollByChannel(message)
if p.author == message.author.id: # or isMemberAdmin(message)
await self.getPollByChannel(message).endPoll()
else:
await self.bot.say("Only admins and the author can stop the poll.")
else:
await self.bot.say("There's no poll ongoing in this channel.")
def getPollByChannel(self, message):
for poll in self.poll_sessions:
if poll.channel == message.channel:
return poll
return False
async def check_poll_votes(self, message):
if message.author.id != self.bot.user.id:
if self.getPollByChannel(message):
self.getPollByChannel(message).checkAnswer(message)
def fetch_joined_at(self, user, server):
"""Just a special case for someone special :^)"""
if user.id == "96130341705637888" and server.id == "133049272517001216":
return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000)
else:
return user.joined_at
class NewPoll():
def __init__(self, message, text, main):
self.channel = message.channel
self.author = message.author.id
self.client = main.bot
self.poll_sessions = main.poll_sessions
msg = [ans.strip() for ans in text.split(";")]
if len(msg) < 2: # Needs at least one question and 2 choices
self.valid = False
return None
else:
self.valid = True
self.already_voted = []
self.question = msg[0]
msg.remove(self.question)
self.answers = {}
i = 1
for answer in msg: # {id : {answer, votes}}
self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
i += 1
async def start(self):
msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
for id, data in self.answers.items():
msg += "{}. *{}*\n".format(id, data["ANSWER"])
msg += "\nType the number to vote!"
await self.client.send_message(self.channel, msg)
await asyncio.sleep(settings["POLL_DURATION"])
if self.valid:
await self.endPoll()
async def endPoll(self):
self.valid = False
msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
for data in self.answers.values():
msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
await self.client.send_message(self.channel, msg)
self.poll_sessions.remove(self)
def checkAnswer(self, message):
try:
i = int(message.content)
if i in self.answers.keys():
if message.author.id not in self.already_voted:
data = self.answers[i]
data["VOTES"] += 1
self.answers[i] = data
self.already_voted.append(message.author.id)
except ValueError:
pass
def setup(bot):
n = General(bot)
bot.add_listener(n.check_poll_votes, "on_message")
bot.add_cog(n) |
search_terms = search_terms.split(" ")
| random_line_split |
general.py | import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
settings = {"POLL_DURATION" : 60}
class RPS(Enum):
rock = "\N{MOYAI}"
paper = "\N{PAGE FACING UP}"
scissors = "\N{BLACK SCISSORS}"
class RPSParser:
def __init__(self, argument):
argument = argument.lower()
if argument == "rock":
self.choice = RPS.rock
elif argument == "paper":
self.choice = RPS.paper
elif argument == "scissors":
self.choice = RPS.scissors
else:
raise
class General:
"""General commands."""
def __init__(self, bot):
self.bot = bot
self.stopwatches = {}
self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good",
"Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again",
"Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again",
"Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
self.poll_sessions = []
@commands.command(hidden=True)
async def ping(self):
"""Pong."""
await self.bot.say("Pong.")
@commands.command()
async def choose(self, *choices):
"""Chooses between multiple choices.
To denote multiple choices, you should use double quotes.
"""
choices = [escape_mass_mentions(c) for c in choices]
if len(choices) < 2:
await self.bot.say('Not enough choices to pick from.')
else:
await self.bot.say(choice(choices))
@commands.command(pass_context=True)
async def roll(self, ctx, number : int = 100):
"""Rolls random number (between 1 and user choice)
Defaults to 100.
"""
author = ctx.message.author
if number > 1:
n = randint(1, number)
await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
else:
await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
"""Flips a coin... or a user.
Defaults to coin.
"""
if user != None:
msg = ""
if user.id == self.bot.user.id:
user = ctx.message.author
msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
char = "abcdefghijklmnopqrstuvwxyz"
tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
table = str.maketrans(char, tran)
name = user.display_name.translate(table)
char = char.upper()
tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
table = str.maketrans(char, tran)
name = name.translate(table)
await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
else:
await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
@commands.command(pass_context=True)
async def rps(self, ctx, your_choice : RPSParser):
"""Play rock paper scissors"""
author = ctx.message.author
player_choice = your_choice.choice
red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
cond = {
(RPS.rock, RPS.paper) : False,
(RPS.rock, RPS.scissors) : True,
(RPS.paper, RPS.rock) : True,
(RPS.paper, RPS.scissors) : False,
(RPS.scissors, RPS.rock) : False,
(RPS.scissors, RPS.paper) : True
}
if red_choice == player_choice:
outcome = None # Tie
else:
outcome = cond[(player_choice, red_choice)]
if outcome is True:
await self.bot.say("{} You win {}!"
"".format(red_choice.value, author.mention))
elif outcome is False:
await self.bot.say("{} You lose {}!"
"".format(red_choice.value, author.mention))
else:
await self.bot.say("{} We're square {}!"
"".format(red_choice.value, author.mention))
@commands.command(name="8", aliases=["8ball"])
async def _8ball(self, *, question : str):
"""Ask 8 ball a question
Question must end with a question mark.
"""
if question.endswith("?") and question != "?":
await self.bot.say("`" + choice(self.ball) + "`")
else:
await self.bot.say("That doesn't look like a question.")
@commands.command(aliases=["sw"], pass_context=True)
async def stopwatch(self, ctx):
"""Starts/stops stopwatch"""
author = ctx.message.author
if not author.id in self.stopwatches:
self.stopwatches[author.id] = int(time.perf_counter())
await self.bot.say(author.mention + " Stopwatch started!")
else:
tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
tmp = str(datetime.timedelta(seconds=tmp))
await self.bot.say(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
self.stopwatches.pop(author.id, None)
@commands.command()
async def lmgtfy(self, *, search_terms : str):
"""Creates a lmgtfy link"""
search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms))
@commands.command(no_pm=True, hidden=True)
async def hug(self, user : discord.Member, intensity : int=1):
"""Because everyone likes hugs
Up to 10 intensity levels."""
name = italics(user.display_name)
if intensity <= 0:
msg = "(っ˘̩╭╮˘̩)っ" + name
elif intensity <= 3:
msg = "(っ´▽`)っ" + name
elif intensity <= 6:
msg = "╰(*´︶`*)╯" + name
elif intensity <= 9:
msg = "(つ≧▽≦)つ" + name
elif intensity >= 10:
msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name)
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
async def userinfo(self, ctx, *, user: discord.Member=None):
"""Shows users's informations"""
author = ctx.message.author
server = ctx.message.server
if not user:
user = author
roles = [x.name for x in user.roles if x.name != "@everyone"]
joined_at = self.fetch_joined_at(user, server)
since_created = (ctx.message.timestamp - user.created_at).days
since_joined = (ctx.message.timestamp - joined_at).days
user_joined = joined_at.strftime("%d %b %Y %H:%M")
user_created = user.created_at.strftime("%d %b %Y %H:%M")
member_number = sorted(server.members,
key=lambda m: m.joined_at).index(user) + 1
created_on = "{}\n({} days ago)".format(user_created, since_created)
joined_on = "{}\n({} days ago)".format(user_joined, since_joined)
game = "Chilling in {} status".format(user.status)
if user.game is None:
pass
elif user.game.url is None:
game = "Playing {}".format(user.game)
else:
game = "Streaming: [{}]({})".format(user.game, user.game.url)
if roles:
roles = sorted(roles, key=[x.name for x in server.role_hierarchy
if x.name != "@everyone"].index)
roles = ", ".join(roles)
else:
roles = "None"
data = discord.Embed(description=game, colour=user.colour)
data.add_field(name="Joined Discord on", value=created_on)
data.add_field(name="Joined this server on", value=joined_on)
data.add_field(name="Roles", value=roles, inline=False)
data.set_footer(text="Member #{} | User ID:{}"
"".format(member_number, user.id))
name = str(user)
name = " ~ ".join((name, user.nick)) if user.nick else name
if user.avatar_url:
data.set_author(name=name, url=user.avatar_url)
data.set_thumbnail(url=user.avatar_url)
else:
data.set_author(name=name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command(pass_context=True, no_pm=True)
async def serverinfo(self, ctx):
"""Shows server's informations"""
server = ctx.message.server
online = len([m.status for m in server.members
if m.status == discord.Status.online or
m.status == discord.Status.idle])
total_users = len(server.members)
text_channels = len([x for x in server.channels
if x.type == discord.ChannelType.text])
voice_channels = len(server.channels) - text_channels
passed = (ctx.message.timestamp - server.created_at).days
created_at = ("Since {}. That's over {} days ago!"
"".format(server.created_at.strftime("%d %b %Y %H:%M"),
passed))
colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])
colour = int(colour, 16)
data = discord.Embed(
description=created_at,
colour=discord.Colour(value=colour))
data.add_field(name="Region", value=str(server.region))
data.add_field(name="Users", value="{}/{}".format(online, total_users))
data.add_field(name="Text Channels", value=text_channels)
data.add_field(name="Voice Channels", value=voice_channels)
data.add_field(name="Roles", value=len(server.roles))
data.add_field(name="Owner", value=str(server.owner))
data.set_footer(text="Server ID: " + server.id)
if server.icon_url:
data.set_author(name=server.name, url=server.icon_url)
data.set_thumbnail(url=server.icon_url)
else:
data.set_author(name=server.name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command()
async def urban(self, *, search_terms : str, definition_number : int=1):
"""Urban Dictionary search
Definition number must be between 1 and 10"""
def encode(s):
return quote_plus(s, encoding='utf-8', errors='replace')
# definition_number is just there to show up in the help
# all this mess is to avoid forcing double quotes on the user
search_terms = search_terms.split(" ")
try:
if len(search_terms) > 1:
pos = int(search_terms[-1]) - 1
search_terms = search_terms[:-1]
else:
pos = 0
if pos not in range(0, 11): # API only provides the
pos = 0 # top 10 definitions
except ValueError:
pos = 0
search_terms = "+".join([encode(s) for s in search_terms])
url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
try:
async with aiohttp.get(url) as r:
result = await r.json()
if result["list"]:
definition = result['list'][pos]['definition']
example = result['list'][pos]['example']
defs = len(result['list'])
msg = ("**Definition #{} out of {}:\n**{}\n\n"
"**Example:\n**{}".format(pos+1, defs, definition,
example))
msg = pagify(msg, ["\n"])
for page in msg:
await self.bot.say(page)
else:
await self.bot.say("Your search terms gave no results.")
except IndexError:
await self.bot.say("There is no definition #{}".format(pos+1))
except:
await self.bot.say("Error.")
@commands.command(pass_context=True, no_pm=True)
async def poll(self, ctx, *text):
"""Starts/stops a poll
Usage example:
poll Is this a poll?;Yes;No;Maybe
poll stop"""
message = ctx.message
if len(text) == 1:
if text[0].lower() == "stop":
await self.endpoll(message)
return
if not self.getPollByChannel(message):
check = " ".join(text).lower()
if "@everyone" in check or "@here" in check:
await self.bot.say("Nice try.")
return
p = NewPoll(message, " ".join(text), self)
if p.valid:
self.poll_sessions.append(p)
await p.start()
else:
await self.bot.say("poll question;option1;option2 (...)")
else:
await self.bot.say("A poll is already ongoing in this channel.")
async def endpoll(self, message):
if self.getPollByChannel(message):
p = self.getPollByChannel(message)
if p.author == message.author.id: # or isMemberAdmin(message)
await self.getPollByChannel(message).endPoll()
else:
await self.bot.say("Only admins and the author can stop the poll.")
else:
await self.bot.say("There's no poll ongoing in this channel.")
def getPollByChannel(self, message):
for poll in self.poll_sessions:
if poll.channel == message.channel:
return poll
return False
async def check_poll_votes(self, message):
if message.author.id != self.bot.user.id:
if self.getPollByChannel(message):
self.getPollByChannel(message).checkAnswer(message)
def fetch_joined_at(self, user, server):
"""Just a special case for someone special :^)"""
if user.id == "96130341705637888" and server.id == "133049272517001216":
return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000)
else:
return user.joined_at
class NewPoll():
def __init__(self, message, text, main):
self.channel = message.channel
self.author = message.author.id
self.client = main.bot
self.poll_sessions = main.poll_sessions
msg = [ans.strip() for ans in text.split(";")]
if len(msg) < 2: # Needs at least one question and 2 choices
self.valid = False
return None
else:
self.valid = True
self.alrea | 0]
msg.remove(self.question)
self.answers = {}
i = 1
for answer in msg: # {id : {answer, votes}}
self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
i += 1
async def start(self):
msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
for id, data in self.answers.items():
msg += "{}. *{}*\n".format(id, data["ANSWER"])
msg += "\nType the number to vote!"
await self.client.send_message(self.channel, msg)
await asyncio.sleep(settings["POLL_DURATION"])
if self.valid:
await self.endPoll()
async def endPoll(self):
self.valid = False
msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
for data in self.answers.values():
msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
await self.client.send_message(self.channel, msg)
self.poll_sessions.remove(self)
def checkAnswer(self, message):
try:
i = int(message.content)
if i in self.answers.keys():
if message.author.id not in self.already_voted:
data = self.answers[i]
data["VOTES"] += 1
self.answers[i] = data
self.already_voted.append(message.author.id)
except ValueError:
pass
def setup(bot):
n = General(bot)
bot.add_listener(n.check_poll_votes, "on_message")
bot.add_cog(n)
| dy_voted = []
self.question = msg[ | conditional_block |
general.py | import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
settings = {"POLL_DURATION" : 60}
class RPS(Enum):
rock = "\N{MOYAI}"
paper = "\N{PAGE FACING UP}"
scissors = "\N{BLACK SCISSORS}"
class RPSParser:
def __init__(self, argument):
argument = argument.lower()
if argument == "rock":
self.choice = RPS.rock
elif argument == "paper":
self.choice = RPS.paper
elif argument == "scissors":
self.choice = RPS.scissors
else:
raise
class General:
"""General commands."""
def __init__(self, bot):
self.bot = bot
self.stopwatches = {}
self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good",
"Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again",
"Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again",
"Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
self.poll_sessions = []
@commands.command(hidden=True)
async def ping(self):
"""Pong."""
await self.bot.say("Pong.")
@commands.command()
async def choose(self, *choices):
"""Chooses between multiple choices.
To denote multiple choices, you should use double quotes.
"""
choices = [escape_mass_mentions(c) for c in choices]
if len(choices) < 2:
await self.bot.say('Not enough choices to pick from.')
else:
await self.bot.say(choice(choices))
@commands.command(pass_context=True)
async def roll(self, ctx, number : int = 100):
"""Rolls random number (between 1 and user choice)
Defaults to 100.
"""
author = ctx.message.author
if number > 1:
n = randint(1, number)
await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
else:
await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
"""Flips a coin... or a user.
Defaults to coin.
"""
if user != None:
msg = ""
if user.id == self.bot.user.id:
user = ctx.message.author
msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
char = "abcdefghijklmnopqrstuvwxyz"
tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
table = str.maketrans(char, tran)
name = user.display_name.translate(table)
char = char.upper()
tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
table = str.maketrans(char, tran)
name = name.translate(table)
await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
else:
await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
@commands.command(pass_context=True)
async def rps(self, ctx, your_choice : RPSParser):
"""Play rock paper scissors"""
author = ctx.message.author
player_choice = your_choice.choice
red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
cond = {
(RPS.rock, RPS.paper) : False,
(RPS.rock, RPS.scissors) : True,
(RPS.paper, RPS.rock) : True,
(RPS.paper, RPS.scissors) : False,
(RPS.scissors, RPS.rock) : False,
(RPS.scissors, RPS.paper) : True
}
if red_choice == player_choice:
outcome = None # Tie
else:
outcome = cond[(player_choice, red_choice)]
if outcome is True:
await self.bot.say("{} You win {}!"
"".format(red_choice.value, author.mention))
elif outcome is False:
await self.bot.say("{} You lose {}!"
"".format(red_choice.value, author.mention))
else:
await self.bot.say("{} We're square {}!"
"".format(red_choice.value, author.mention))
@commands.command(name="8", aliases=["8ball"])
async def _8ball(self, *, question : str):
"""Ask 8 ball a question
Question must end with a question mark.
"""
if question.endswith("?") and question != "?":
await self.bot.say("`" + choice(self.ball) + "`")
else:
await self.bot.say("That doesn't look like a question.")
@commands.command(aliases=["sw"], pass_context=True)
async def stopwatch(self, ctx):
"""Starts/stops stopwatch"""
author = ctx.message.author
if not author.id in self.stopwatches:
self.stopwatches[author.id] = int(time.perf_counter())
await self.bot.say(author.mention + " Stopwatch started!")
else:
tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
tmp = str(datetime.timedelta(seconds=tmp))
await self.bot.say(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
self.stopwatches.pop(author.id, None)
@commands.command()
async def lmgtfy(self, *, search_terms : str):
"""Creates a lmgtfy link"""
search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms))
@commands.command(no_pm=True, hidden=True)
async def hug(self, user : discord.Member, intensity : int=1):
"""Because everyone likes hugs
Up to 10 intensity levels."""
name = italics(user.display_name)
if intensity <= 0:
msg = "(っ˘̩╭╮˘̩)っ" + name
elif intensity <= 3:
msg = "(っ´▽`)っ" + name
elif intensity <= 6:
msg = "╰(*´︶`*)╯" + name
elif intensity <= 9:
msg = "(つ≧▽≦)つ" + name
elif intensity >= 10:
msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name)
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
async def userinfo(self, ctx, *, user: discord.Member=None):
"""Shows users's informations"""
author = ctx.message.author
server = ctx.message.server
if not user:
user = author
roles = [x.name for x in user.roles if x.name != "@everyone"]
joined_at = self.fetch_joined_at(user, server)
since_created = (ctx.message.timestamp - user.created_at).days
since_joined = (ctx.message.timestamp - joined_at).days
user_joined = joined_at.strftime("%d %b %Y %H:%M")
user_created = user.created_at.strftime("%d %b %Y %H:%M")
member_number = sorted(server.members,
key=lambda m: m.joined_at).index(user) + 1
created_on = "{}\n({} days ago)".format(user_created, since_created)
joined_on = "{}\n({} days ago)".format(user_joined, since_joined)
game = "Chilling in {} status".format(user.status)
if user.game is None:
pass
elif user.game.url is None:
game = "Playing {}".format(user.game)
else:
game = "Streaming: [{}]({})".format(user.game, user.game.url)
if roles:
roles = sorted(roles, key=[x.name for x in server.role_hierarchy
if x.name != "@everyone"].index)
roles = ", ".join(roles)
else:
roles = "None"
data = discord.Embed(description=game, colour=user.colour)
data.add_field(name="Joined Discord on", value=created_on)
data.add_field(name="Joined this server on", value=joined_on)
data.add_field(name="Roles", value=roles, inline=False)
data.set_footer(text="Member #{} | User ID:{}"
"".format(member_number, user.id))
name = str(user)
name = " ~ ".join((name, user.nick)) if user.nick else name
if user.avatar_url:
data.set_author(name=name, url=user.avatar_url)
data.set_thumbnail(url=user.avatar_url)
else:
data.set_author(name=name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command(pass_context=True, no_pm=True)
async def serverinfo(self, ctx):
"""Shows server's informations"""
server = ctx.message.server
online = len([m.status for m in server.members
if m.status == discord.Status.online or
m.status == discord.Status.idle])
total_users = len(server.members)
text_channels = len([x for x in server.channels
if x.type == discord.ChannelType.text])
voice_channels = len(server.channels) - text_channels
passed = (ctx.message.timestamp - server.created_at).days
created_at = ("Since {}. That's over {} days ago!"
"".format(server.created_at.strftime("%d %b %Y %H:%M"),
passed))
colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])
colour = int(colour, 16)
data = discord.Embed(
description=created_at,
colour=discord.Colour(value=colour))
data.add_field(name="Region", value=str(server.region))
data.add_field(name="Users", value="{}/{}".format(online, total_users))
data.add_field(name="Text Channels", value=text_channels)
data.add_field(name="Voice Channels", value=voice_channels)
data.add_field(name="Roles", value=len(server.roles))
data.add_field(name="Owner", value=str(server.owner))
data.set_footer(text="Server ID: " + server.id)
if server.icon_url:
data.set_author(name=server.name, url=server.icon_url)
data.set_thumbnail(url=server.icon_url)
else:
data.set_author(name=server.name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command()
async def urban(self, *, search_terms : str, definition_number : int=1):
"""Urban Dictionary search
Definition number must be between 1 and 10"""
def encode(s):
return quote_plus(s, encoding='utf-8', errors='replace')
# definition_number is just there to show up in the help
# all this mess is to avoid forcing double quotes on the user
search_terms = search_terms.split(" ")
try:
if len(search_terms) > 1:
pos = int(search_terms[-1]) - 1
search_terms = search_terms[:-1]
else:
pos = 0
if pos not in range(0, 11): # API only provides the
pos = 0 # top 10 definitions
except ValueError:
pos = 0
search_terms = "+".join([encode(s) for s in search_terms])
url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
try:
async with aiohttp.get(url) as r:
result = await r.json()
if result["list"]:
definition = result['list'][pos]['definition']
example = result['list'][pos]['example']
defs = len(result['list'])
msg = ("**Definition #{} out of {}:\n**{}\n\n"
"**Example:\n**{}".format(pos+1, defs, definition,
example))
msg = pagify(msg, ["\n"])
for page in msg:
await self.bot.say(page)
else:
await self.bot.say("Your search terms gave no results.")
except IndexError:
await self.bot.say("There is no definition #{}".format(pos+1))
except:
await self.bot.say("Error.")
@commands.command(pass_context=True, no_pm=True)
async def poll(self, ctx, *text):
"""Starts/stops a poll
Usage example:
poll Is this a poll?;Yes;No;Maybe
poll stop"""
message = ctx.message
if len(text) == 1:
if text[0].lower() == "stop":
await self.endpoll(message)
return
if not self.getPollByChannel(message):
check = " ".join(text).lower()
if "@everyone" in check or "@here" in check:
await self.bot.say("Nice try.")
return
p = NewPoll(message, " ".join(text), self)
if p.valid:
self.poll_sessions.append(p)
await p.start()
else:
await self.bot.say("poll question;option1;option2 (...)")
else:
await self.bot.say("A poll is already ongoing in this channel.")
async def endpoll(self, message):
if self.getPollByChannel(message):
p = self.getPollByChannel(message)
if p.author == message.author.id: # or isMemberAdmin(message)
await self.getPollByChannel(message).endPoll()
else:
await self.bot.say("Only admins and the author can stop the poll.")
else:
await self.bot.say("There's no poll ongoing in this channel.")
def getPollByChannel(self, message):
for poll in self.poll_sessions:
if poll.channel == message.channel:
return poll
return False
async def check_poll_votes(self, message):
if message.author.id != self.bot.user.id:
if self.getPollByChannel(message):
self.getPollByChannel(message).checkAnswer(message)
def fetch_joined_at(self, user, server):
"""Just a special case for someone special :^)"""
if u | 0341705637888" and server.id == "133049272517001216":
return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000)
else:
return user.joined_at
class NewPoll():
def __init__(self, message, text, main):
self.channel = message.channel
self.author = message.author.id
self.client = main.bot
self.poll_sessions = main.poll_sessions
msg = [ans.strip() for ans in text.split(";")]
if len(msg) < 2: # Needs at least one question and 2 choices
self.valid = False
return None
else:
self.valid = True
self.already_voted = []
self.question = msg[0]
msg.remove(self.question)
self.answers = {}
i = 1
for answer in msg: # {id : {answer, votes}}
self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
i += 1
async def start(self):
msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
for id, data in self.answers.items():
msg += "{}. *{}*\n".format(id, data["ANSWER"])
msg += "\nType the number to vote!"
await self.client.send_message(self.channel, msg)
await asyncio.sleep(settings["POLL_DURATION"])
if self.valid:
await self.endPoll()
async def endPoll(self):
self.valid = False
msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
for data in self.answers.values():
msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
await self.client.send_message(self.channel, msg)
self.poll_sessions.remove(self)
def checkAnswer(self, message):
try:
i = int(message.content)
if i in self.answers.keys():
if message.author.id not in self.already_voted:
data = self.answers[i]
data["VOTES"] += 1
self.answers[i] = data
self.already_voted.append(message.author.id)
except ValueError:
pass
def setup(bot):
n = General(bot)
bot.add_listener(n.check_poll_votes, "on_message")
bot.add_cog(n)
| ser.id == "9613 | identifier_name |
general.py | import discord
from discord.ext import commands
from .utils.chat_formatting import escape_mass_mentions, italics, pagify
from random import randint
from random import choice
from enum import Enum
from urllib.parse import quote_plus
import datetime
import time
import aiohttp
import asyncio
settings = {"POLL_DURATION" : 60}
class RPS(Enum):
rock = "\N{MOYAI}"
paper = "\N{PAGE FACING UP}"
scissors = "\N{BLACK SCISSORS}"
class RPSParser:
def __init__(self, argument):
argument = argument.lower()
if argument == "rock":
self.choice = RPS.rock
elif argument == "paper":
self.choice = RPS.paper
elif argument == "scissors":
self.choice = RPS.scissors
else:
raise
class General:
"""General commands."""
def __init__(self, bot):
self.bot = bot
self.stopwatches = {}
self.ball = ["As I see it, yes", "It is certain", "It is decidedly so", "Most likely", "Outlook good",
"Signs point to yes", "Without a doubt", "Yes", "Yes – definitely", "You may rely on it", "Reply hazy, try again",
"Ask again later", "Better not tell you now", "Cannot predict now", "Concentrate and ask again",
"Don't count on it", "My reply is no", "My sources say no", "Outlook not so good", "Very doubtful"]
self.poll_sessions = []
@commands.command(hidden=True)
async def ping(self):
"""Pong."""
await self.bot.say("Pong.")
@commands.command()
async def choose(self, *choices):
"""Chooses between multiple choices.
To denote multiple choices, you should use double quotes.
"""
choices = [escape_mass_mentions(c) for c in choices]
if len(choices) < 2:
await self.bot.say('Not enough choices to pick from.')
else:
await self.bot.say(choice(choices))
@commands.command(pass_context=True)
async def roll(self, ctx, number : int = 100):
"""Rolls random number (between 1 and user choice)
Defaults to 100.
"""
author = ctx.message.author
if number > 1:
n = randint(1, number)
await self.bot.say("{} :game_die: {} :game_die:".format(author.mention, n))
else:
await self.bot.say("{} Maybe higher than 1? ;P".format(author.mention))
@commands.command(pass_context=True)
async def flip(self, ctx, user : discord.Member=None):
"""Flips a coin... or a user.
Defaults to coin.
"""
if user != None:
msg = ""
if user.id == self.bot.user.id:
user = ctx.message.author
msg = "Nice try. You think this is funny? How about *this* instead:\n\n"
char = "abcdefghijklmnopqrstuvwxyz"
tran = "ɐqɔpǝɟƃɥᴉɾʞlɯuodbɹsʇnʌʍxʎz"
table = str.maketrans(char, tran)
name = user.display_name.translate(table)
char = char.upper()
tran = "∀qƆpƎℲפHIſʞ˥WNOԀQᴚS┴∩ΛMX⅄Z"
table = str.maketrans(char, tran)
name = name.translate(table)
await self.bot.say(msg + "(╯°□°)╯︵ " + name[::-1])
else:
await self.bot.say("*flips a coin and... " + choice(["HEADS!*", "TAILS!*"]))
@commands.command(pass_context=True)
async def rps(self, ctx, your_choice : RPSParser):
"""Play rock paper scissors"""
author = ctx.message.author
player_choice = your_choice.choice
red_choice = choice((RPS.rock, RPS.paper, RPS.scissors))
cond = {
(RPS.rock, RPS.paper) : False,
(RPS.rock, RPS.scissors) : True,
(RPS.paper, RPS.rock) : True,
(RPS.paper, RPS.scissors) : False,
(RPS.scissors, RPS.rock) : False,
(RPS.scissors, RPS.paper) : True
}
if red_choice == player_choice:
outcome = None # Tie
else:
outcome = cond[(player_choice, red_choice)]
if outcome is True:
await self.bot.say("{} You win {}!"
"".format(red_choice.value, author.mention))
elif outcome is False:
await self.bot.say("{} You lose {}!"
"".format(red_choice.value, author.mention))
else:
await self.bot.say("{} We're square {}!"
"".format(red_choice.value, author.mention))
@commands.command(name="8", aliases=["8ball"])
async def _8ball(self, *, question : str):
"""Ask 8 ball a question
Question must end with a question mark.
"""
if question.endswith("?") and question != "?":
await self.bot.say("`" + choice(self.ball) + "`")
else:
await self.bot.say("That doesn't look like a question.")
@commands.command(aliases=["sw"], pass_context=True)
async def stopwatch(self, ctx):
"""Starts/stops stopwatch"""
author = ctx.message.author
if not author.id in self.stopwatches:
self.stopwatches[author.id] = int(time.perf_counter())
await self.bot.say(author.mention + " Stopwatch started!")
else:
tmp = abs(self.stopwatches[author.id] - int(time.perf_counter()))
tmp = str(datetime.timedelta(seconds=tmp))
await self.bot.say(author.mention + " Stopwatch stopped! Time: **" + tmp + "**")
self.stopwatches.pop(author.id, None)
@commands.command()
async def lmgtfy(self, *, search_terms : str):
"""Creates a lmgtfy link"""
search_terms = escape_mass_mentions(search_terms.replace(" ", "+"))
await self.bot.say("https://lmgtfy.com/?q={}".format(search_terms))
@commands.command(no_pm=True, hidden=True)
async def hug(self, user : discord.Member, intensity : int=1):
"""Because everyone likes hugs
Up to 10 intensity levels."""
name = italics(user.display_name)
if intensity <= 0:
msg = "(っ˘̩╭╮˘̩)っ" + name
elif intensity <= 3:
msg = "(っ´▽`)っ" + name
elif intensity <= 6:
msg = "╰(*´︶`*)╯" + name
elif intensity <= 9:
msg = "(つ≧▽≦)つ" + name
elif intensity >= 10:
msg = "(づ ̄ ³ ̄)づ{} ⊂(´・ω・`⊂)".format(name)
await self.bot.say(msg)
@commands.command(pass_context=True, no_pm=True)
async def userinfo(self, ctx, *, user: discord.Member=None):
"""Shows users's informations"""
author = ctx.message.author
server = ctx.message.server
if not user:
user = author
roles = [x.name for x in user.roles if x.name != "@everyone"]
joined_at = self.fetch_joined_at(user, server)
since_created = (ctx.message.timestamp - user.created_at).days
since_joined = (ctx.message.timestamp - joined_at).days
user_joined = joined_at.strftime("%d %b %Y %H:%M")
user_created = user.created_at.strftime("%d %b %Y %H:%M")
member_number = sorted(server.members,
key=lambda m: m.joined_at).index(user) + 1
created_on = "{}\n({} days ago)".format(user_created, since_created)
joined_on = "{}\n({} days ago)".format(user_joined, since_joined)
game = "Chilling in {} status".format(user.status)
if user.game is None:
pass
elif user.game.url is None:
game = "Playing {}".format(user.game)
else:
game = "Streaming: [{}]({})".format(user.game, user.game.url)
if roles:
roles = sorted(roles, key=[x.name for x in server.role_hierarchy
if x.name != "@everyone"].index)
roles = ", ".join(roles)
else:
roles = "None"
data = discord.Embed(description=game, colour=user.colour)
data.add_field(name="Joined Discord on", value=created_on)
data.add_field(name="Joined this server on", value=joined_on)
data.add_field(name="Roles", value=roles, inline=False)
data.set_footer(text="Member #{} | User ID:{}"
"".format(member_number, user.id))
name = str(user)
name = " ~ ".join((name, user.nick)) if user.nick else name
if user.avatar_url:
data.set_author(name=name, url=user.avatar_url)
data.set_thumbnail(url=user.avatar_url)
else:
data.set_author(name=name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command(pass_context=True, no_pm=True)
async def serverinfo(self, ctx):
"""Shows server's informations"""
server = ctx.message.server
online = len([m.status for m in server.members
if m.status == discord.Status.online or
m.status == discord.Status.idle])
total_users = len(server.members)
text_channels = len([x for x in server.channels
if x.type == discord.ChannelType.text])
voice_channels = len(server.channels) - text_channels
passed = (ctx.message.timestamp - server.created_at).days
created_at = ("Since {}. That's over {} days ago!"
"".format(server.created_at.strftime("%d %b %Y %H:%M"),
passed))
colour = ''.join([choice('0123456789ABCDEF') for x in range(6)])
colour = int(colour, 16)
data = discord.Embed(
description=created_at,
colour=discord.Colour(value=colour))
data.add_field(name="Region", value=str(server.region))
data.add_field(name="Users", value="{}/{}".format(online, total_users))
data.add_field(name="Text Channels", value=text_channels)
data.add_field(name="Voice Channels", value=voice_channels)
data.add_field(name="Roles", value=len(server.roles))
data.add_field(name="Owner", value=str(server.owner))
data.set_footer(text="Server ID: " + server.id)
if server.icon_url:
data.set_author(name=server.name, url=server.icon_url)
data.set_thumbnail(url=server.icon_url)
else:
data.set_author(name=server.name)
try:
await self.bot.say(embed=data)
except discord.HTTPException:
await self.bot.say("I need the `Embed links` permission "
"to send this")
@commands.command()
async def urban(self, *, search_terms : str, definition_number : int=1):
"""Urban Dictionary search
Definition number must be between 1 and 10"""
def encode(s):
return quote_plus(s, encoding='utf-8', errors='replace')
# definition_number is just there to show up in the help
# all this mess is to avoid forcing double quotes on the user
search_terms = search_terms.split(" ")
try:
if len(search_terms) > 1:
pos = int(search_terms[-1]) - 1
search_terms = search_terms[:-1]
else:
pos = 0
if pos not in range(0, 11): # API only provides the
pos = 0 # top 10 definitions
except ValueError:
pos = 0
search_terms = "+".join([encode(s) for s in search_terms])
url = "http://api.urbandictionary.com/v0/define?term=" + search_terms
try:
async with aiohttp.get(url) as r:
result = await r.json()
if result["list"]:
definition = result['list'][pos]['definition']
example = result['list'][pos]['example']
defs = len(result['list'])
msg = ("**Definition #{} out of {}:\n**{}\n\n"
"**Example:\n**{}".format(pos+1, defs, definition,
example))
msg = pagify(msg, ["\n"])
for page in msg:
await self.bot.say(page)
else:
await self.bot.say("Your search terms gave no results.")
except IndexError:
await self.bot.say("There is no definition #{}".format(pos+1))
except:
await self.bot.say("Error.")
@commands.command(pass_context=True, no_pm=True)
async def poll(self, ctx, *text):
"""Starts/stops a poll
Usage example:
poll Is this a poll?;Yes;No;Maybe
poll stop"""
message = ctx.message
if len(text) == 1:
if text[0].lower() == "stop":
await self.endpoll(message)
return
if not self.getPollByChannel(message):
check = " ".join(text).lower()
if "@everyone" in check or "@here" in check:
await self.bot.say("Nice try.")
return
p = NewPoll(message, " ".join(text), self)
if p.valid:
self.poll_sessions.append(p)
await p.start()
else:
await self.bot.say("poll question;option1;option2 (...)")
else:
await self.bot.say("A poll is already ongoing in this channel.")
async def endpoll(self, message):
if self.getPollByChannel(message):
p = self.getPollByChannel(message)
if p.author = | annel == message.channel:
return poll
return False
async def check_poll_votes(self, message):
if message.author.id != self.bot.user.id:
if self.getPollByChannel(message):
self.getPollByChannel(message).checkAnswer(message)
def fetch_joined_at(self, user, server):
"""Just a special case for someone special :^)"""
if user.id == "96130341705637888" and server.id == "133049272517001216":
return datetime.datetime(2016, 1, 10, 6, 8, 4, 443000)
else:
return user.joined_at
class NewPoll():
def __init__(self, message, text, main):
self.channel = message.channel
self.author = message.author.id
self.client = main.bot
self.poll_sessions = main.poll_sessions
msg = [ans.strip() for ans in text.split(";")]
if len(msg) < 2: # Needs at least one question and 2 choices
self.valid = False
return None
else:
self.valid = True
self.already_voted = []
self.question = msg[0]
msg.remove(self.question)
self.answers = {}
i = 1
for answer in msg: # {id : {answer, votes}}
self.answers[i] = {"ANSWER" : answer, "VOTES" : 0}
i += 1
async def start(self):
msg = "**POLL STARTED!**\n\n{}\n\n".format(self.question)
for id, data in self.answers.items():
msg += "{}. *{}*\n".format(id, data["ANSWER"])
msg += "\nType the number to vote!"
await self.client.send_message(self.channel, msg)
await asyncio.sleep(settings["POLL_DURATION"])
if self.valid:
await self.endPoll()
async def endPoll(self):
self.valid = False
msg = "**POLL ENDED!**\n\n{}\n\n".format(self.question)
for data in self.answers.values():
msg += "*{}* - {} votes\n".format(data["ANSWER"], str(data["VOTES"]))
await self.client.send_message(self.channel, msg)
self.poll_sessions.remove(self)
def checkAnswer(self, message):
try:
i = int(message.content)
if i in self.answers.keys():
if message.author.id not in self.already_voted:
data = self.answers[i]
data["VOTES"] += 1
self.answers[i] = data
self.already_voted.append(message.author.id)
except ValueError:
pass
def setup(bot):
n = General(bot)
bot.add_listener(n.check_poll_votes, "on_message")
bot.add_cog(n)
| = message.author.id: # or isMemberAdmin(message)
await self.getPollByChannel(message).endPoll()
else:
await self.bot.say("Only admins and the author can stop the poll.")
else:
await self.bot.say("There's no poll ongoing in this channel.")
def getPollByChannel(self, message):
for poll in self.poll_sessions:
if poll.ch | identifier_body |
runLCWeekly.py | import sys
from fermipy import utils
utils.init_matplotlib_backend()
from fermipy.gtanalysis import GTAnalysis
from fermipy.utils import *
import yaml
import pprint
import numpy
import argparse
from fermipy.gtanalysis import GTAnalysis
def main():
usage = "usage: %(prog)s [config file]"
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage,description=description)
parser.add_argument('--config', default = 'sample_config.yaml')
parser.add_argument('--source', default = None)
args = parser.parse_args()
gta = GTAnalysis(args.config)
if args.source is None:
|
gta.setup()
gta.optimize()
loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)
model = {'Index' : 2.0, 'SpatialModel' : 'PointSource'}
srcs = gta.find_sources(model=model, sqrt_ts_threshold=5.0,
min_separation=0.5)
sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
gta.tsmap(make_plots=True)
gta.write_roi('fit0')
lc = gta.lightcurve(src_name, binsz=86400.*7.0, free_radius=3.0, use_scaled_srcmap=True,
multithread=False)
if __name__ == "__main__":
main()
| src_name = gta.roi.sources[0].name | conditional_block |
runLCWeekly.py | import sys
from fermipy import utils
utils.init_matplotlib_backend()
from fermipy.gtanalysis import GTAnalysis
from fermipy.utils import *
import yaml
import pprint
import numpy
import argparse
from fermipy.gtanalysis import GTAnalysis
def | ():
usage = "usage: %(prog)s [config file]"
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage,description=description)
parser.add_argument('--config', default = 'sample_config.yaml')
parser.add_argument('--source', default = None)
args = parser.parse_args()
gta = GTAnalysis(args.config)
if args.source is None:
src_name = gta.roi.sources[0].name
gta.setup()
gta.optimize()
loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)
model = {'Index' : 2.0, 'SpatialModel' : 'PointSource'}
srcs = gta.find_sources(model=model, sqrt_ts_threshold=5.0,
min_separation=0.5)
sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
gta.tsmap(make_plots=True)
gta.write_roi('fit0')
lc = gta.lightcurve(src_name, binsz=86400.*7.0, free_radius=3.0, use_scaled_srcmap=True,
multithread=False)
if __name__ == "__main__":
main()
| main | identifier_name |
runLCWeekly.py | import sys
from fermipy import utils
utils.init_matplotlib_backend()
from fermipy.gtanalysis import GTAnalysis
from fermipy.utils import *
import yaml
import pprint
import numpy
import argparse
from fermipy.gtanalysis import GTAnalysis
def main():
usage = "usage: %(prog)s [config file]"
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage,description=description)
parser.add_argument('--config', default = 'sample_config.yaml')
parser.add_argument('--source', default = None)
args = parser.parse_args()
gta = GTAnalysis(args.config)
if args.source is None:
src_name = gta.roi.sources[0].name
gta.setup()
gta.optimize()
loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)
model = {'Index' : 2.0, 'SpatialModel' : 'PointSource'}
srcs = gta.find_sources(model=model, sqrt_ts_threshold=5.0,
min_separation=0.5)
sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
gta.tsmap(make_plots=True) | lc = gta.lightcurve(src_name, binsz=86400.*7.0, free_radius=3.0, use_scaled_srcmap=True,
multithread=False)
if __name__ == "__main__":
main() | gta.write_roi('fit0') | random_line_split |
runLCWeekly.py | import sys
from fermipy import utils
utils.init_matplotlib_backend()
from fermipy.gtanalysis import GTAnalysis
from fermipy.utils import *
import yaml
import pprint
import numpy
import argparse
from fermipy.gtanalysis import GTAnalysis
def main():
|
if __name__ == "__main__":
main()
| usage = "usage: %(prog)s [config file]"
description = "Run fermipy analysis chain."
parser = argparse.ArgumentParser(usage=usage,description=description)
parser.add_argument('--config', default = 'sample_config.yaml')
parser.add_argument('--source', default = None)
args = parser.parse_args()
gta = GTAnalysis(args.config)
if args.source is None:
src_name = gta.roi.sources[0].name
gta.setup()
gta.optimize()
loc = gta.localize(src_name, free_radius=1.0, update=True, make_plots=True)
model = {'Index' : 2.0, 'SpatialModel' : 'PointSource'}
srcs = gta.find_sources(model=model, sqrt_ts_threshold=5.0,
min_separation=0.5)
sed = gta.sed(src_name, free_radius=1.0, make_plots=True)
gta.tsmap(make_plots=True)
gta.write_roi('fit0')
lc = gta.lightcurve(src_name, binsz=86400.*7.0, free_radius=3.0, use_scaled_srcmap=True,
multithread=False) | identifier_body |
weather_list.js | import React, { Component } from 'react';
import { connect } from 'react-redux';
import Chart from '../components/chart';
import GoogleMap from '../components/google_map'
//npm i --save react-sparklines@1.6.0
class WeatherList extends Component {
renderWeather(cityData){
const name = cityData.city.name;
const temps = cityData.list.map(weather => weather.main.temp);
const pressure = cityData.list.map(weather => weather.main.pressure);
const humidity = cityData.list.map(weather => weather.main.humidity);
const { lon, lat } = cityData.city.coord;
return (
<tr key = {name}>
<td><GoogleMap lon={lon} lat={lat} /> </td>
<td><Chart data={temps} color="orange" units="°C" /></td>
<td><Chart data={pressure} color="green" units="hPa" /></td>
<td><Chart data={humidity} color="black" units= "%" /></td>
</tr>
)
}
render () {
return (
<table className = "table table-hover">
<thead>
<tr>
<th> City </th> | <th> Humidity (%) </th>
</tr>
</thead>
<tbody>
{this.props.weather.map(this.renderWeather)}
</tbody>
</table>
)};
}
function mapStateToProps (state){
return { weather: state.weather};
}
export default connect (mapStateToProps)(WeatherList); | <th> Temperature (°C) </th>
<th> Pressure (hPa) </th> | random_line_split |
weather_list.js | import React, { Component } from 'react';
import { connect } from 'react-redux';
import Chart from '../components/chart';
import GoogleMap from '../components/google_map'
//npm i --save react-sparklines@1.6.0
class WeatherList extends Component {
renderWeather(cityData){
const name = cityData.city.name;
const temps = cityData.list.map(weather => weather.main.temp);
const pressure = cityData.list.map(weather => weather.main.pressure);
const humidity = cityData.list.map(weather => weather.main.humidity);
const { lon, lat } = cityData.city.coord;
return (
<tr key = {name}>
<td><GoogleMap lon={lon} lat={lat} /> </td>
<td><Chart data={temps} color="orange" units="°C" /></td>
<td><Chart data={pressure} color="green" units="hPa" /></td>
<td><Chart data={humidity} color="black" units= "%" /></td>
</tr>
)
}
render () {
return (
<table className = "table table-hover">
<thead>
<tr>
<th> City </th>
<th> Temperature (°C) </th>
<th> Pressure (hPa) </th>
<th> Humidity (%) </th>
</tr>
</thead>
<tbody>
{this.props.weather.map(this.renderWeather)}
</tbody>
</table>
)};
}
function ma | state){
return { weather: state.weather};
}
export default connect (mapStateToProps)(WeatherList); | pStateToProps ( | identifier_name |
weather_list.js | import React, { Component } from 'react';
import { connect } from 'react-redux';
import Chart from '../components/chart';
import GoogleMap from '../components/google_map'
//npm i --save react-sparklines@1.6.0
class WeatherList extends Component {
renderWeather(cityData){
const name = cityData.city.name;
const temps = cityData.list.map(weather => weather.main.temp);
const pressure = cityData.list.map(weather => weather.main.pressure);
const humidity = cityData.list.map(weather => weather.main.humidity);
const { lon, lat } = cityData.city.coord;
return (
<tr key = {name}>
<td><GoogleMap lon={lon} lat={lat} /> </td>
<td><Chart data={temps} color="orange" units="°C" /></td>
<td><Chart data={pressure} color="green" units="hPa" /></td>
<td><Chart data={humidity} color="black" units= "%" /></td>
</tr>
)
}
render () {
return (
<table className = "table table-hover">
<thead>
<tr>
<th> City </th>
<th> Temperature (°C) </th>
<th> Pressure (hPa) </th>
<th> Humidity (%) </th>
</tr>
</thead>
<tbody>
{this.props.weather.map(this.renderWeather)}
</tbody>
</table>
)};
}
function mapStateToProps (state){
| export default connect (mapStateToProps)(WeatherList); | return { weather: state.weather};
}
| identifier_body |
instances.controller.ts | /*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as _ from 'lodash';
interface IInstancesScope extends ng.IScope {
switchDisplayInstances(): void;
displayAllInstances: boolean;
}
class InstancesController {
private instances: any;
private startedInstances: any;
private _displayEmptyMode: boolean;
private searchGatewayInstances: string;
constructor(
private $scope: IInstancesScope) {
'ngInject';
}
$onInit() {
this.searchGatewayInstances = '';
this.startedInstances = _.filter(this.instances, { 'state': 'started'});
this._displayEmptyMode = this.startedInstances.length === 0;
this.$scope.displayAllInstances = false;
let that = this;
this.$scope.switchDisplayInstances = function() {
that.$scope.displayAllInstances = !that.$scope.displayAllInstances;
if (!that.$scope.displayAllInstances) {
that._displayEmptyMode = that.startedInstances.length === 0;
} else {
that._displayEmptyMode = that.instances.length === 0;
}
};
}
getOSIcon(osName) {
if (osName) {
var lowerOSName = osName.toLowerCase();
if (lowerOSName.indexOf('mac') >= 0) {
return 'apple';
} else if (lowerOSName.indexOf('nix') >= 0 || lowerOSName.indexOf('nux') >= 0 || lowerOSName.indexOf('aix') >= 0) {
return 'desktop_windows';
} else if (lowerOSName.indexOf('win') >= 0) {
return 'windows';
}
}
return 'desktop_windows';
}
| () {
return this.instances.length === 0;
}
}
export default InstancesController;
| displayEmptyMode | identifier_name |
instances.controller.ts | /*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as _ from 'lodash';
interface IInstancesScope extends ng.IScope {
switchDisplayInstances(): void;
displayAllInstances: boolean;
}
class InstancesController {
private instances: any;
private startedInstances: any;
private _displayEmptyMode: boolean;
private searchGatewayInstances: string;
constructor(
private $scope: IInstancesScope) |
$onInit() {
this.searchGatewayInstances = '';
this.startedInstances = _.filter(this.instances, { 'state': 'started'});
this._displayEmptyMode = this.startedInstances.length === 0;
this.$scope.displayAllInstances = false;
let that = this;
this.$scope.switchDisplayInstances = function() {
that.$scope.displayAllInstances = !that.$scope.displayAllInstances;
if (!that.$scope.displayAllInstances) {
that._displayEmptyMode = that.startedInstances.length === 0;
} else {
that._displayEmptyMode = that.instances.length === 0;
}
};
}
getOSIcon(osName) {
if (osName) {
var lowerOSName = osName.toLowerCase();
if (lowerOSName.indexOf('mac') >= 0) {
return 'apple';
} else if (lowerOSName.indexOf('nix') >= 0 || lowerOSName.indexOf('nux') >= 0 || lowerOSName.indexOf('aix') >= 0) {
return 'desktop_windows';
} else if (lowerOSName.indexOf('win') >= 0) {
return 'windows';
}
}
return 'desktop_windows';
}
displayEmptyMode() {
return this.instances.length === 0;
}
}
export default InstancesController;
| {
'ngInject';
} | identifier_body |
instances.controller.ts | /*
* Copyright (C) 2015 The Gravitee team (http://gravitee.io)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at | *
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as _ from 'lodash';
interface IInstancesScope extends ng.IScope {
switchDisplayInstances(): void;
displayAllInstances: boolean;
}
class InstancesController {
private instances: any;
private startedInstances: any;
private _displayEmptyMode: boolean;
private searchGatewayInstances: string;
constructor(
private $scope: IInstancesScope) {
'ngInject';
}
$onInit() {
this.searchGatewayInstances = '';
this.startedInstances = _.filter(this.instances, { 'state': 'started'});
this._displayEmptyMode = this.startedInstances.length === 0;
this.$scope.displayAllInstances = false;
let that = this;
this.$scope.switchDisplayInstances = function() {
that.$scope.displayAllInstances = !that.$scope.displayAllInstances;
if (!that.$scope.displayAllInstances) {
that._displayEmptyMode = that.startedInstances.length === 0;
} else {
that._displayEmptyMode = that.instances.length === 0;
}
};
}
getOSIcon(osName) {
if (osName) {
var lowerOSName = osName.toLowerCase();
if (lowerOSName.indexOf('mac') >= 0) {
return 'apple';
} else if (lowerOSName.indexOf('nix') >= 0 || lowerOSName.indexOf('nux') >= 0 || lowerOSName.indexOf('aix') >= 0) {
return 'desktop_windows';
} else if (lowerOSName.indexOf('win') >= 0) {
return 'windows';
}
}
return 'desktop_windows';
}
displayEmptyMode() {
return this.instances.length === 0;
}
}
export default InstancesController; | random_line_split | |
Header.js | import React from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
import { FormattedMessage } from 'react-intl';
// Import Style
import styles from './Header.css';
export function Header(props, context) {
const languageNodes = props.intl.enabledLanguages.map(
lang => <li key={lang} onClick={() => props.switchLanguage(lang)} className={lang === props.intl.locale ? styles.selected : ''}>{lang}</li>
);
const renderAddPostButton = context.router.isActive ? context.router.isActive() :
context.router.route.location.pathname === '/';
return (
<div className={styles.header}>
<div className={styles['language-switcher']}>
<ul> | </div>
<div className={styles.content}>
<h1 className={styles['site-title']}>
<Link to="/" ><FormattedMessage id="siteTitle" /></Link>
</h1>
{
renderAddPostButton
? <a className={styles['add-post-button']} href="#" onClick={props.toggleAddPost}><FormattedMessage id="addPost" /></a>
: null
}
</div>
</div>
);
}
Header.contextTypes = {
router: PropTypes.object,
};
Header.propTypes = {
toggleAddPost: PropTypes.func.isRequired,
switchLanguage: PropTypes.func.isRequired,
intl: PropTypes.object.isRequired,
};
export default Header; | <li><FormattedMessage id="switchLanguage" /></li>
{languageNodes}
</ul> | random_line_split |
Header.js | import React from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
import { FormattedMessage } from 'react-intl';
// Import Style
import styles from './Header.css';
export function Header(props, context) |
Header.contextTypes = {
router: PropTypes.object,
};
Header.propTypes = {
toggleAddPost: PropTypes.func.isRequired,
switchLanguage: PropTypes.func.isRequired,
intl: PropTypes.object.isRequired,
};
export default Header;
| {
const languageNodes = props.intl.enabledLanguages.map(
lang => <li key={lang} onClick={() => props.switchLanguage(lang)} className={lang === props.intl.locale ? styles.selected : ''}>{lang}</li>
);
const renderAddPostButton = context.router.isActive ? context.router.isActive() :
context.router.route.location.pathname === '/';
return (
<div className={styles.header}>
<div className={styles['language-switcher']}>
<ul>
<li><FormattedMessage id="switchLanguage" /></li>
{languageNodes}
</ul>
</div>
<div className={styles.content}>
<h1 className={styles['site-title']}>
<Link to="/" ><FormattedMessage id="siteTitle" /></Link>
</h1>
{
renderAddPostButton
? <a className={styles['add-post-button']} href="#" onClick={props.toggleAddPost}><FormattedMessage id="addPost" /></a>
: null
}
</div>
</div>
);
} | identifier_body |
Header.js | import React from 'react';
import PropTypes from 'prop-types';
import { Link } from 'react-router-dom';
import { FormattedMessage } from 'react-intl';
// Import Style
import styles from './Header.css';
export function | (props, context) {
const languageNodes = props.intl.enabledLanguages.map(
lang => <li key={lang} onClick={() => props.switchLanguage(lang)} className={lang === props.intl.locale ? styles.selected : ''}>{lang}</li>
);
const renderAddPostButton = context.router.isActive ? context.router.isActive() :
context.router.route.location.pathname === '/';
return (
<div className={styles.header}>
<div className={styles['language-switcher']}>
<ul>
<li><FormattedMessage id="switchLanguage" /></li>
{languageNodes}
</ul>
</div>
<div className={styles.content}>
<h1 className={styles['site-title']}>
<Link to="/" ><FormattedMessage id="siteTitle" /></Link>
</h1>
{
renderAddPostButton
? <a className={styles['add-post-button']} href="#" onClick={props.toggleAddPost}><FormattedMessage id="addPost" /></a>
: null
}
</div>
</div>
);
}
Header.contextTypes = {
router: PropTypes.object,
};
Header.propTypes = {
toggleAddPost: PropTypes.func.isRequired,
switchLanguage: PropTypes.func.isRequired,
intl: PropTypes.object.isRequired,
};
export default Header;
| Header | identifier_name |
create.py | """Creates a user """
# :license: MIT, see LICENSE for more details.
import json
import string
import sys
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('username')
@click.option('--email', '-e', required=True,
help="Email address for this user. Required for creation.")
@click.option('--password', '-p', default=None, show_default=True,
help="Password to set for this user. If no password is provided, user will be sent an email "
"to generate one, which expires in 24 hours. '-p generate' will create a password for you "
"(Requires Python 3.6+). Passwords require 8+ characters, upper and lowercase, a number "
"and a symbol.")
@click.option('--from-user', '-u', default=None,
help="Base user to use as a template for creating this user. "
"Will default to the user running this command. Information provided in --template "
"supersedes this template.")
@click.option('--template', '-t', default=None,
help="A json string describing https://softlayer.github.io/reference/datatypes/SoftLayer_User_Customer/")
@environment.pass_env
def | (env, username, email, password, from_user, template):
"""Creates a user Users.
Remember to set the permissions and access for this new user.
Example::
slcli user create my@email.com -e my@email.com -p generate -a
-t '{"firstName": "Test", "lastName": "Testerson"}'
"""
mgr = SoftLayer.UserManager(env.client)
user_mask = ("mask[id, firstName, lastName, email, companyName, address1, city, country, postalCode, "
"state, userStatusId, timezoneId]")
from_user_id = None
if from_user is None:
user_template = mgr.get_current_user(objectmask=user_mask)
from_user_id = user_template['id']
else:
from_user_id = helpers.resolve_id(mgr.resolve_ids, from_user, 'username')
user_template = mgr.get_user(from_user_id, objectmask=user_mask)
# If we send the ID back to the API, an exception will be thrown
del user_template['id']
if template is not None:
try:
template_object = json.loads(template)
for key in template_object:
user_template[key] = template_object[key]
except ValueError as ex:
raise exceptions.ArgumentError("Unable to parse --template. %s" % ex)
user_template['username'] = username
if password == 'generate':
password = generate_password()
user_template['email'] = email
if not env.skip_confirmations:
table = formatting.KeyValueTable(['name', 'value'])
for key in user_template:
table.add_row([key, user_template[key]])
table.add_row(['password', password])
click.secho("You are about to create the following user...", fg='green')
env.fout(table)
if not formatting.confirm("Do you wish to continue?"):
raise exceptions.CLIAbort("Canceling creation!")
result = mgr.create_user(user_template, password)
table = formatting.Table(['Username', 'Email', 'Password'])
table.add_row([result['username'], result['email'], password])
env.fout(table)
def generate_password():
"""Returns a 23 character random string, with 3 special characters at the end"""
if sys.version_info > (3, 6):
import secrets # pylint: disable=import-error,import-outside-toplevel
alphabet = string.ascii_letters + string.digits
password = ''.join(secrets.choice(alphabet) for i in range(20))
special = ''.join(secrets.choice(string.punctuation) for i in range(3))
return password + special
else:
raise ImportError("Generating passwords require python 3.6 or higher")
| cli | identifier_name |
create.py | """Creates a user """
# :license: MIT, see LICENSE for more details.
import json
import string
import sys | import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('username')
@click.option('--email', '-e', required=True,
help="Email address for this user. Required for creation.")
@click.option('--password', '-p', default=None, show_default=True,
help="Password to set for this user. If no password is provided, user will be sent an email "
"to generate one, which expires in 24 hours. '-p generate' will create a password for you "
"(Requires Python 3.6+). Passwords require 8+ characters, upper and lowercase, a number "
"and a symbol.")
@click.option('--from-user', '-u', default=None,
help="Base user to use as a template for creating this user. "
"Will default to the user running this command. Information provided in --template "
"supersedes this template.")
@click.option('--template', '-t', default=None,
help="A json string describing https://softlayer.github.io/reference/datatypes/SoftLayer_User_Customer/")
@environment.pass_env
def cli(env, username, email, password, from_user, template):
"""Creates a user Users.
Remember to set the permissions and access for this new user.
Example::
slcli user create my@email.com -e my@email.com -p generate -a
-t '{"firstName": "Test", "lastName": "Testerson"}'
"""
mgr = SoftLayer.UserManager(env.client)
user_mask = ("mask[id, firstName, lastName, email, companyName, address1, city, country, postalCode, "
"state, userStatusId, timezoneId]")
from_user_id = None
if from_user is None:
user_template = mgr.get_current_user(objectmask=user_mask)
from_user_id = user_template['id']
else:
from_user_id = helpers.resolve_id(mgr.resolve_ids, from_user, 'username')
user_template = mgr.get_user(from_user_id, objectmask=user_mask)
# If we send the ID back to the API, an exception will be thrown
del user_template['id']
if template is not None:
try:
template_object = json.loads(template)
for key in template_object:
user_template[key] = template_object[key]
except ValueError as ex:
raise exceptions.ArgumentError("Unable to parse --template. %s" % ex)
user_template['username'] = username
if password == 'generate':
password = generate_password()
user_template['email'] = email
if not env.skip_confirmations:
table = formatting.KeyValueTable(['name', 'value'])
for key in user_template:
table.add_row([key, user_template[key]])
table.add_row(['password', password])
click.secho("You are about to create the following user...", fg='green')
env.fout(table)
if not formatting.confirm("Do you wish to continue?"):
raise exceptions.CLIAbort("Canceling creation!")
result = mgr.create_user(user_template, password)
table = formatting.Table(['Username', 'Email', 'Password'])
table.add_row([result['username'], result['email'], password])
env.fout(table)
def generate_password():
"""Returns a 23 character random string, with 3 special characters at the end"""
if sys.version_info > (3, 6):
import secrets # pylint: disable=import-error,import-outside-toplevel
alphabet = string.ascii_letters + string.digits
password = ''.join(secrets.choice(alphabet) for i in range(20))
special = ''.join(secrets.choice(string.punctuation) for i in range(3))
return password + special
else:
raise ImportError("Generating passwords require python 3.6 or higher") | random_line_split | |
create.py | """Creates a user """
# :license: MIT, see LICENSE for more details.
import json
import string
import sys
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('username')
@click.option('--email', '-e', required=True,
help="Email address for this user. Required for creation.")
@click.option('--password', '-p', default=None, show_default=True,
help="Password to set for this user. If no password is provided, user will be sent an email "
"to generate one, which expires in 24 hours. '-p generate' will create a password for you "
"(Requires Python 3.6+). Passwords require 8+ characters, upper and lowercase, a number "
"and a symbol.")
@click.option('--from-user', '-u', default=None,
help="Base user to use as a template for creating this user. "
"Will default to the user running this command. Information provided in --template "
"supersedes this template.")
@click.option('--template', '-t', default=None,
help="A json string describing https://softlayer.github.io/reference/datatypes/SoftLayer_User_Customer/")
@environment.pass_env
def cli(env, username, email, password, from_user, template):
"""Creates a user Users.
Remember to set the permissions and access for this new user.
Example::
slcli user create my@email.com -e my@email.com -p generate -a
-t '{"firstName": "Test", "lastName": "Testerson"}'
"""
mgr = SoftLayer.UserManager(env.client)
user_mask = ("mask[id, firstName, lastName, email, companyName, address1, city, country, postalCode, "
"state, userStatusId, timezoneId]")
from_user_id = None
if from_user is None:
|
else:
from_user_id = helpers.resolve_id(mgr.resolve_ids, from_user, 'username')
user_template = mgr.get_user(from_user_id, objectmask=user_mask)
# If we send the ID back to the API, an exception will be thrown
del user_template['id']
if template is not None:
try:
template_object = json.loads(template)
for key in template_object:
user_template[key] = template_object[key]
except ValueError as ex:
raise exceptions.ArgumentError("Unable to parse --template. %s" % ex)
user_template['username'] = username
if password == 'generate':
password = generate_password()
user_template['email'] = email
if not env.skip_confirmations:
table = formatting.KeyValueTable(['name', 'value'])
for key in user_template:
table.add_row([key, user_template[key]])
table.add_row(['password', password])
click.secho("You are about to create the following user...", fg='green')
env.fout(table)
if not formatting.confirm("Do you wish to continue?"):
raise exceptions.CLIAbort("Canceling creation!")
result = mgr.create_user(user_template, password)
table = formatting.Table(['Username', 'Email', 'Password'])
table.add_row([result['username'], result['email'], password])
env.fout(table)
def generate_password():
"""Returns a 23 character random string, with 3 special characters at the end"""
if sys.version_info > (3, 6):
import secrets # pylint: disable=import-error,import-outside-toplevel
alphabet = string.ascii_letters + string.digits
password = ''.join(secrets.choice(alphabet) for i in range(20))
special = ''.join(secrets.choice(string.punctuation) for i in range(3))
return password + special
else:
raise ImportError("Generating passwords require python 3.6 or higher")
| user_template = mgr.get_current_user(objectmask=user_mask)
from_user_id = user_template['id'] | conditional_block |
create.py | """Creates a user """
# :license: MIT, see LICENSE for more details.
import json
import string
import sys
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import exceptions
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
@click.command()
@click.argument('username')
@click.option('--email', '-e', required=True,
help="Email address for this user. Required for creation.")
@click.option('--password', '-p', default=None, show_default=True,
help="Password to set for this user. If no password is provided, user will be sent an email "
"to generate one, which expires in 24 hours. '-p generate' will create a password for you "
"(Requires Python 3.6+). Passwords require 8+ characters, upper and lowercase, a number "
"and a symbol.")
@click.option('--from-user', '-u', default=None,
help="Base user to use as a template for creating this user. "
"Will default to the user running this command. Information provided in --template "
"supersedes this template.")
@click.option('--template', '-t', default=None,
help="A json string describing https://softlayer.github.io/reference/datatypes/SoftLayer_User_Customer/")
@environment.pass_env
def cli(env, username, email, password, from_user, template):
"""Creates a user Users.
Remember to set the permissions and access for this new user.
Example::
slcli user create my@email.com -e my@email.com -p generate -a
-t '{"firstName": "Test", "lastName": "Testerson"}'
"""
mgr = SoftLayer.UserManager(env.client)
user_mask = ("mask[id, firstName, lastName, email, companyName, address1, city, country, postalCode, "
"state, userStatusId, timezoneId]")
from_user_id = None
if from_user is None:
user_template = mgr.get_current_user(objectmask=user_mask)
from_user_id = user_template['id']
else:
from_user_id = helpers.resolve_id(mgr.resolve_ids, from_user, 'username')
user_template = mgr.get_user(from_user_id, objectmask=user_mask)
# If we send the ID back to the API, an exception will be thrown
del user_template['id']
if template is not None:
try:
template_object = json.loads(template)
for key in template_object:
user_template[key] = template_object[key]
except ValueError as ex:
raise exceptions.ArgumentError("Unable to parse --template. %s" % ex)
user_template['username'] = username
if password == 'generate':
password = generate_password()
user_template['email'] = email
if not env.skip_confirmations:
table = formatting.KeyValueTable(['name', 'value'])
for key in user_template:
table.add_row([key, user_template[key]])
table.add_row(['password', password])
click.secho("You are about to create the following user...", fg='green')
env.fout(table)
if not formatting.confirm("Do you wish to continue?"):
raise exceptions.CLIAbort("Canceling creation!")
result = mgr.create_user(user_template, password)
table = formatting.Table(['Username', 'Email', 'Password'])
table.add_row([result['username'], result['email'], password])
env.fout(table)
def generate_password():
| """Returns a 23 character random string, with 3 special characters at the end"""
if sys.version_info > (3, 6):
import secrets # pylint: disable=import-error,import-outside-toplevel
alphabet = string.ascii_letters + string.digits
password = ''.join(secrets.choice(alphabet) for i in range(20))
special = ''.join(secrets.choice(string.punctuation) for i in range(3))
return password + special
else:
raise ImportError("Generating passwords require python 3.6 or higher") | identifier_body | |
main.py | #coding: utf-8
#!/usr/bin/env python3
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
def __init__(self, parent, title, wavehandle):
wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
self.wavehandle = wavehandle
self.scale = 8
self.shift = 0
self.drawcnt = 0
self.scope = [0]
# Create the menubar
menuBar = wx.MenuBar()
menu = wx.Menu()
menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
# bind the menu event s
self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
menuBar.Append(menu, "&Actions")
self.SetMenuBar(menuBar)
self.wavepanel = WavePanel(self, self.getscale, self.setsector)
self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
self.scopepanel = ScopePanel(self)
self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
self.textpanel = sdisp.TextPanel(self)
self.timestamp = wx.StaticText(self.wavepanel, -1,
("Time: " + str(0.0)
+ "/" + str(0.0)),
pos=(2, 2),
style=wx.ALIGN_LEFT)
self.timestamp.SetForegroundColour((217, 66, 244))
btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
pos=(2, 0), size=(80, 40))
btnExport = wx.Button(self.buttonpanel, -1, "Export",
pos=(84, 0), size=(80, 40))
btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
pos=(166, 0), size=(80, 40))
self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
pos=(943, 0), size=(80, 40))
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
self.contentNotSaved = False
self.fileloaded = False
self.quadrant = -1
self.Centre()
def setsector(self, sector):
self.quadrant = abs(sector)
self.Refresh()
def getscale(self):
return self.scale
def getSample(self, sector):
print("obtaining sample")
if self.quadrant == -1:
self.setsector(1)
sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
def OnPlayButton(self, event):
if self.btnPlay.GetValue():
self.audiohandle = audiothread.AudioHandler()
if self.fileloaded:
self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
self.scope = self.audiohandle.getscopesample()
print("sample length: " + str(len(self.scope)))
self.audiohandle.start()
else:
self.audiohandle.stop()
self.audiohandle = None
def onMouseWheel(self, event):
|
def OnOpenButton(self, evt):
#Open file
with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return # the user changed their mind
pathname = fileDialog.GetPath()
try:
with wave.open(pathname, 'r') as file:
self.wavehandle.loadwave(file)
self.Refresh()
self.fileloaded = True
except IOError:
wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):
print("Export")
def OnQuitButton(self, evt):
self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent, getter, sender):
wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
self.mouseOver = False
self.ctrlDown = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
self.getter = getter
self.sender = sender
def onMouseClick(self, event):
if self.mouseOver:
x, y = self.ScreenToClient(wx.GetMousePosition())
sector = abs(x // (2048 / self.getter()))
self.sender(sector)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
def onKeyPress(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = True
def onKeyRelease(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = False
class ScopePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent):
wx.Panel.__init__(self, parent, pos=(0, 256), size=(1024, 128))
self.mouseOver = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
class MyApp(wx.App):
def OnInit(self):
waveHandle = wavehandle.WaveHandler()
frame = MyFrame(None, "MiSynth Editor", waveHandle)
self.SetTopWindow(frame)
frame.Show(True)
return True
if __name__ == '__main__':
app = MyApp(redirect=True)
app.MainLoop()
| if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
if (self.shift < 10000000):
self.shift += 2000
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh() | identifier_body |
main.py | #coding: utf-8
#!/usr/bin/env python3
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
def __init__(self, parent, title, wavehandle):
wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
self.wavehandle = wavehandle
self.scale = 8
self.shift = 0
self.drawcnt = 0
self.scope = [0]
# Create the menubar
menuBar = wx.MenuBar()
menu = wx.Menu()
menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
# bind the menu event s
self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
menuBar.Append(menu, "&Actions")
self.SetMenuBar(menuBar)
self.wavepanel = WavePanel(self, self.getscale, self.setsector)
self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
self.scopepanel = ScopePanel(self)
self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
self.textpanel = sdisp.TextPanel(self)
self.timestamp = wx.StaticText(self.wavepanel, -1,
("Time: " + str(0.0)
+ "/" + str(0.0)),
pos=(2, 2),
style=wx.ALIGN_LEFT)
self.timestamp.SetForegroundColour((217, 66, 244))
btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
pos=(2, 0), size=(80, 40))
btnExport = wx.Button(self.buttonpanel, -1, "Export",
pos=(84, 0), size=(80, 40))
btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
pos=(166, 0), size=(80, 40))
self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
pos=(943, 0), size=(80, 40))
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
self.contentNotSaved = False
self.fileloaded = False
self.quadrant = -1
self.Centre()
def setsector(self, sector):
self.quadrant = abs(sector)
self.Refresh()
def getscale(self):
return self.scale
def getSample(self, sector):
print("obtaining sample")
if self.quadrant == -1:
self.setsector(1)
sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
def OnPlayButton(self, event):
if self.btnPlay.GetValue():
self.audiohandle = audiothread.AudioHandler()
if self.fileloaded:
self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
self.scope = self.audiohandle.getscopesample()
print("sample length: " + str(len(self.scope)))
self.audiohandle.start()
else:
self.audiohandle.stop()
self.audiohandle = None
def onMouseWheel(self, event):
if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
if (self.shift < 10000000):
self.shift += 2000
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh()
def OnOpenButton(self, evt):
#Open file
with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return # the user changed their mind
pathname = fileDialog.GetPath()
try:
with wave.open(pathname, 'r') as file:
self.wavehandle.loadwave(file)
self.Refresh()
self.fileloaded = True
except IOError:
wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):
print("Export")
def OnQuitButton(self, evt):
self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent, getter, sender):
wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
self.mouseOver = False
self.ctrlDown = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
self.getter = getter
self.sender = sender | if self.mouseOver:
x, y = self.ScreenToClient(wx.GetMousePosition())
sector = abs(x // (2048 / self.getter()))
self.sender(sector)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
def onKeyPress(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = True
def onKeyRelease(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = False
class ScopePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent):
wx.Panel.__init__(self, parent, pos=(0, 256), size=(1024, 128))
self.mouseOver = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
class MyApp(wx.App):
def OnInit(self):
waveHandle = wavehandle.WaveHandler()
frame = MyFrame(None, "MiSynth Editor", waveHandle)
self.SetTopWindow(frame)
frame.Show(True)
return True
if __name__ == '__main__':
app = MyApp(redirect=True)
app.MainLoop() |
def onMouseClick(self, event): | random_line_split |
main.py | #coding: utf-8
#!/usr/bin/env python3
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
def __init__(self, parent, title, wavehandle):
wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
self.wavehandle = wavehandle
self.scale = 8
self.shift = 0
self.drawcnt = 0
self.scope = [0]
# Create the menubar
menuBar = wx.MenuBar()
menu = wx.Menu()
menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
# bind the menu event s
self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
menuBar.Append(menu, "&Actions")
self.SetMenuBar(menuBar)
self.wavepanel = WavePanel(self, self.getscale, self.setsector)
self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
self.scopepanel = ScopePanel(self)
self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
self.textpanel = sdisp.TextPanel(self)
self.timestamp = wx.StaticText(self.wavepanel, -1,
("Time: " + str(0.0)
+ "/" + str(0.0)),
pos=(2, 2),
style=wx.ALIGN_LEFT)
self.timestamp.SetForegroundColour((217, 66, 244))
btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
pos=(2, 0), size=(80, 40))
btnExport = wx.Button(self.buttonpanel, -1, "Export",
pos=(84, 0), size=(80, 40))
btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
pos=(166, 0), size=(80, 40))
self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
pos=(943, 0), size=(80, 40))
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
self.contentNotSaved = False
self.fileloaded = False
self.quadrant = -1
self.Centre()
def setsector(self, sector):
self.quadrant = abs(sector)
self.Refresh()
def getscale(self):
return self.scale
def getSample(self, sector):
print("obtaining sample")
if self.quadrant == -1:
self.setsector(1)
sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
def OnPlayButton(self, event):
if self.btnPlay.GetValue():
self.audiohandle = audiothread.AudioHandler()
if self.fileloaded:
self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
self.scope = self.audiohandle.getscopesample()
print("sample length: " + str(len(self.scope)))
self.audiohandle.start()
else:
self.audiohandle.stop()
self.audiohandle = None
def onMouseWheel(self, event):
if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
|
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh()
def OnOpenButton(self, evt):
#Open file
with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return # the user changed their mind
pathname = fileDialog.GetPath()
try:
with wave.open(pathname, 'r') as file:
self.wavehandle.loadwave(file)
self.Refresh()
self.fileloaded = True
except IOError:
wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):
print("Export")
def OnQuitButton(self, evt):
self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent, getter, sender):
wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
self.mouseOver = False
self.ctrlDown = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
self.getter = getter
self.sender = sender
def onMouseClick(self, event):
if self.mouseOver:
x, y = self.ScreenToClient(wx.GetMousePosition())
sector = abs(x // (2048 / self.getter()))
self.sender(sector)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
def onKeyPress(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = True
def onKeyRelease(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = False
class ScopePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent):
wx.Panel.__init__(self, parent, pos=(0, 256), size=(1024, 128))
self.mouseOver = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
class MyApp(wx.App):
def OnInit(self):
waveHandle = wavehandle.WaveHandler()
frame = MyFrame(None, "MiSynth Editor", waveHandle)
self.SetTopWindow(frame)
frame.Show(True)
return True
if __name__ == '__main__':
app = MyApp(redirect=True)
app.MainLoop()
| if (self.shift < 10000000):
self.shift += 2000 | conditional_block |
main.py | #coding: utf-8
#!/usr/bin/env python3
#Initial test code for MiSynth Wave Generator
#Opens Wave Files And Cuts And Plays Them As The FPGA will
#Synth plays back 2048 samples at frequency of note
#Effective sample rate is 901,120Hz @ 440Hz
#CURRENTLY A DRAWING LOOP TO BE SOLVED, THANKS WX/PYTHON FOR YOUR
#COMPLETE LACK OF TRANSPARENCY
#ALWAYS USE TKINTER
import wave
import wx
import audiothread
import wavehandle
import sdisp
class MyFrame(wx.Frame):
def __init__(self, parent, title, wavehandle):
wx.Frame.__init__(self, parent, -1, title, size=(1024, 624))
self.wavehandle = wavehandle
self.scale = 8
self.shift = 0
self.drawcnt = 0
self.scope = [0]
# Create the menubar
menuBar = wx.MenuBar()
menu = wx.Menu()
menu.Append(wx.ID_OPEN, "Open\tAlt-O", "Open Wave")
menu.Append(wx.ID_EXIT, "E&xit\tAlt-X", "Exit")
# bind the menu event s
self.Bind(wx.EVT_MENU, self.OnOpenButton, id=wx.ID_OPEN)
self.Bind(wx.EVT_MENU, self.OnQuitButton, id=wx.ID_EXIT)
menuBar.Append(menu, "&Actions")
self.SetMenuBar(menuBar)
self.wavepanel = WavePanel(self, self.getscale, self.setsector)
self.wavepanel.SetBackgroundColour(wx.Colour(32,55,91))
self.scopepanel = ScopePanel(self)
self.scopepanel.SetBackgroundColour(wx.Colour(20,25,20))
self.buttonpanel = wx.Panel(self, -1, pos=(0, 384), size=(1024, 40))
self.textpanel = sdisp.TextPanel(self)
self.timestamp = wx.StaticText(self.wavepanel, -1,
("Time: " + str(0.0)
+ "/" + str(0.0)),
pos=(2, 2),
style=wx.ALIGN_LEFT)
self.timestamp.SetForegroundColour((217, 66, 244))
btnOpen = wx.Button(self.buttonpanel, wx.ID_OPEN, "Open",
pos=(2, 0), size=(80, 40))
btnExport = wx.Button(self.buttonpanel, -1, "Export",
pos=(84, 0), size=(80, 40))
btnQuit = wx.Button(self.buttonpanel, wx.ID_EXIT, "Quit",
pos=(166, 0), size=(80, 40))
self.btnPlay = wx.ToggleButton(self.buttonpanel, -1, "Play",
pos=(943, 0), size=(80, 40))
# bind the button events to handlers
self.Bind(wx.EVT_BUTTON, self.OnOpenButton, btnOpen)
self.Bind(wx.EVT_BUTTON, self.OnExportButton, btnExport)
self.Bind(wx.EVT_BUTTON, self.OnQuitButton, btnQuit)
self.Bind(wx.EVT_TOGGLEBUTTON, self.OnPlayButton, self.btnPlay)
self.Bind(wx.EVT_MOUSEWHEEL, self.onMouseWheel)
self.wavepanel.Bind(wx.EVT_PAINT, self.onPaint)
self.contentNotSaved = False
self.fileloaded = False
self.quadrant = -1
self.Centre()
def setsector(self, sector):
self.quadrant = abs(sector)
self.Refresh()
def getscale(self):
return self.scale
def | (self, sector):
print("obtaining sample")
if self.quadrant == -1:
self.setsector(1)
sample = self.wavehandle.getaudiodata(self.shift, 0, sector)
return sample
def onPaint(self, event):
self.drawcnt += 1
#print("Drawing" + str(self.drawcnt))
dc = wx.PaintDC(self.wavepanel)
dc.Clear()
totalseconds = self.wavehandle.gettotaltime()
shiftseconds = self.wavehandle.framestoseconds(self.shift)
self.timestamp.SetLabel("Time: " + str(shiftseconds) + "/" + str(
totalseconds))
dc.SetBrush(wx.Brush(wx.Colour(16, 28, 45), wx.SOLID))
dc.DrawRectangle(256, 0, 512, 256)
# Centre Line
pointdata = self.wavehandle.getdrawpoints(self.shift)
for x in range(1, 1024): # Ugly
if (x > 256) and (x < 768):
dc.SetPen(wx.Pen((0, 255, 242), 1, wx.PENSTYLE_SOLID))
else:
dc.SetPen(wx.Pen((183, 204, 163), 1, wx.PENSTYLE_SOLID))
dc.DrawLine(x - 1, pointdata[x - 1], x, pointdata[x])
#dc.DrawPoint(x, pointdata[x])
if (x == 256) or (x == 768):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
if (x == 496) or (x == 528):
dc.SetPen(wx.Pen((0, 0, 0), 1, wx.PENSTYLE_DOT))
dc.DrawLine(x, 0, x, 256)
dc = wx.PaintDC(self.scopepanel)
dc.Clear()
dc.SetPen(wx.Pen((256,0,0), 1, wx.PENSTYLE_SOLID))
for x in range(0, 1024):
if len(self.scope) > 1:
p = self.scope[x % len(self.scope)] + 64
else:
p = 64
dc.DrawPoint(x, p)
def OnPlayButton(self, event):
if self.btnPlay.GetValue():
self.audiohandle = audiothread.AudioHandler()
if self.fileloaded:
self.audiohandle.setsample(self.getSample(self.quadrant), 2048)
self.scope = self.audiohandle.getscopesample()
print("sample length: " + str(len(self.scope)))
self.audiohandle.start()
else:
self.audiohandle.stop()
self.audiohandle = None
def onMouseWheel(self, event):
if self.wavepanel.mouseOver:
if self.wavepanel.ctrlDown:
if event.GetWheelRotation() > 0:
if(self.scale > 1):
self.scale = self.scale >> 1
else:
if(self.scale < 2097151):
self.scale = self.scale << 1
self.Refresh()
else:
if event.GetWheelRotation() > 0:
if(self.shift > 0):
self.shift -= 2000
else:
if (self.shift < 10000000):
self.shift += 2000
self.Refresh()
if self.scopepanel.mouseOver:
if event.GetWheelRotation() > 0:
self.audiohandle.setshift(1)
else:
self.audiohandle.setshift(-1)
self.scope = self.audiohandle.getscopesample()
self.Refresh()
def OnOpenButton(self, evt):
#Open file
with wx.FileDialog(self, "Open .wav file.", wildcard="WAV files (*.wav)|*.wav",
style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return # the user changed their mind
pathname = fileDialog.GetPath()
try:
with wave.open(pathname, 'r') as file:
self.wavehandle.loadwave(file)
self.Refresh()
self.fileloaded = True
except IOError:
wx.LogError("Cannot open file '%s'." % pathname)
def OnExportButton(self, evt):
print("Export")
def OnQuitButton(self, evt):
self.Close()
class WavePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent, getter, sender):
wx.Panel.__init__(self, parent, pos=(0,0),size=(1024, 256))
self.mouseOver = False
self.ctrlDown = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
self.Bind(wx.EVT_KEY_DOWN, self.onKeyPress)
self.Bind(wx.EVT_KEY_UP, self.onKeyRelease)
self.Bind(wx.EVT_LEFT_DOWN, self.onMouseClick)
self.getter = getter
self.sender = sender
def onMouseClick(self, event):
if self.mouseOver:
x, y = self.ScreenToClient(wx.GetMousePosition())
sector = abs(x // (2048 / self.getter()))
self.sender(sector)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
def onKeyPress(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = True
def onKeyRelease(self, event):
keycode = event.GetKeyCode()
if keycode == wx.WXK_CONTROL:
self.ctrlDown = False
class ScopePanel(wx.Panel): #just handles mouseover events
def __init__(self, parent):
wx.Panel.__init__(self, parent, pos=(0, 256), size=(1024, 128))
self.mouseOver = False
self.Bind(wx.EVT_ENTER_WINDOW, self.onMouseOver)
self.Bind(wx.EVT_LEAVE_WINDOW, self.onMouseLeave)
def onMouseOver(self, event):
self.mouseOver = True
def onMouseLeave(self, event):
self.mouseOver = False
class MyApp(wx.App):
def OnInit(self):
waveHandle = wavehandle.WaveHandler()
frame = MyFrame(None, "MiSynth Editor", waveHandle)
self.SetTopWindow(frame)
frame.Show(True)
return True
if __name__ == '__main__':
app = MyApp(redirect=True)
app.MainLoop()
| getSample | identifier_name |
exames.server.controller.js | 'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller'),
Exame = mongoose.model('Exame'),
_ = require('lodash');
/**
* Create a Exame
*/
exports.create = function(req, res) {
var exame = new Exame(req.body);
exame.user = req.user;
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
exports.addPergunta= function(exameId,perguntaId){
Exame.findById(exameId).exec(function(err,exame){
if(err){
console.log('erro finding exam first');
return;
}
else{
if(!exame){
// console.log('exam not found'+exameId);
return;
}
var exame1=exame.toObject();
exame1._perguntas.push(perguntaId);
exame = _.extend(exame , exame1);
exame.save(function(err) {
if (err) {
//console.log('erro ao salvar');
return;
} else {
//console.log('sucesso');
}
});
}
});
};
exports.listar = function(req, res) {
Exame.find().select('id ano').exec(function (err,exames) {
// body...
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
res.jsonp(exames);
}
});
};
/**
* Show the current Exame
*/
exports.read = function(req, res) {
Exame.findById(req.params.exameId).populate({path:'_perguntas',model:'Pergunta'}).populate('disciplina').exec(function(err,exame){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
if(!exame){
return res.status(404).send({message:'Exame nao encontrado'});
}
Exame.populate(exame._perguntas,{
path:'_ajuda',
model:'Ajuda'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
exame._ajuda=docs;
});
Exame.populate(exame._perguntas,{
path:'_alternativas',
model:'Alternativa'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
// console.log(docs.toObject());
// exame._perguntas=docs;
res.jsonp(exame); //exame=docs;
});
//res.jsonp(exame);
}
});
};
/**
* Exame middleware
*/
// exports.exameByID = function(req, res, next, id) {
// Exame.findById(id).populate('_perguntas').exec(function(err, exame) {
// //Exame.findById(id).deepPopulate('_perguntas.alternativas').exec(function(err, exame) {
// if (err) return next(err);
// if (! exame) return next(new Error('Failed to load Exame ' + id));
// req.exame = exame ;
// next();
// });
// };
/**
* Update a Exame
*/
exports.update = function(req, res) {
var exame = req.exame ;
| exame = _.extend(exame , req.body);
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* Delete an Exame
*/
exports.delete = function(req, res) {
var exame = req.exame ;
exame.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* List of Exames
*/
exports.list = function(req, res) {
Exame.find().select('id ano disciplina').populate('disciplina','name').sort({ano:-1}).exec(function(err, exames) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exames);
}
});
};
/**
* Exame authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.exame.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
}; | random_line_split | |
exames.server.controller.js | 'use strict';
/**
* Module dependencies.
*/
var mongoose = require('mongoose'),
errorHandler = require('./errors.server.controller'),
Exame = mongoose.model('Exame'),
_ = require('lodash');
/**
* Create a Exame
*/
exports.create = function(req, res) {
var exame = new Exame(req.body);
exame.user = req.user;
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
exports.addPergunta= function(exameId,perguntaId){
Exame.findById(exameId).exec(function(err,exame){
if(err){
console.log('erro finding exam first');
return;
}
else{
if(!exame){
// console.log('exam not found'+exameId);
return;
}
var exame1=exame.toObject();
exame1._perguntas.push(perguntaId);
exame = _.extend(exame , exame1);
exame.save(function(err) {
if (err) {
//console.log('erro ao salvar');
return;
} else {
//console.log('sucesso');
}
});
}
});
};
exports.listar = function(req, res) {
Exame.find().select('id ano').exec(function (err,exames) {
// body...
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
res.jsonp(exames);
}
});
};
/**
* Show the current Exame
*/
exports.read = function(req, res) {
Exame.findById(req.params.exameId).populate({path:'_perguntas',model:'Pergunta'}).populate('disciplina').exec(function(err,exame){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
else{
if(!exame){
return res.status(404).send({message:'Exame nao encontrado'});
}
Exame.populate(exame._perguntas,{
path:'_ajuda',
model:'Ajuda'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
exame._ajuda=docs;
});
Exame.populate(exame._perguntas,{
path:'_alternativas',
model:'Alternativa'},
function(err,docs){
if(err){
return res.status(400).send({message:errorHandler.getErrorMessage(err)});
}
// console.log(docs.toObject());
// exame._perguntas=docs;
res.jsonp(exame); //exame=docs;
});
//res.jsonp(exame);
}
});
};
/**
* Exame middleware
*/
// exports.exameByID = function(req, res, next, id) {
// Exame.findById(id).populate('_perguntas').exec(function(err, exame) {
// //Exame.findById(id).deepPopulate('_perguntas.alternativas').exec(function(err, exame) {
// if (err) return next(err);
// if (! exame) return next(new Error('Failed to load Exame ' + id));
// req.exame = exame ;
// next();
// });
// };
/**
* Update a Exame
*/
exports.update = function(req, res) {
var exame = req.exame ;
exame = _.extend(exame , req.body);
exame.save(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* Delete an Exame
*/
exports.delete = function(req, res) {
var exame = req.exame ;
exame.remove(function(err) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else {
res.jsonp(exame);
}
});
};
/**
* List of Exames
*/
exports.list = function(req, res) {
Exame.find().select('id ano disciplina').populate('disciplina','name').sort({ano:-1}).exec(function(err, exames) {
if (err) {
return res.status(400).send({
message: errorHandler.getErrorMessage(err)
});
} else |
});
};
/**
* Exame authorization middleware
*/
exports.hasAuthorization = function(req, res, next) {
if (req.exame.user.id !== req.user.id) {
return res.status(403).send('User is not authorized');
}
next();
};
| {
res.jsonp(exames);
} | conditional_block |
global.ts | import { css, Theme } from "@emotion/react";
import "@fontsource/roboto/400.css";
import "@fontsource/roboto/500.css";
import "@fontsource/roboto/700.css";
import "@fontsource/roboto/900.css";
export const global = (theme: Theme) => css` | box-sizing: border-box;
}
/** Remove this when bootstrap is removed **/
html {
scroll-behavior: revert !important;
}
body,
html {
margin: 0;
padding: 0;
font-family: "Roboto", sans-serif;
color: ${theme.fontColor.normal};
font-size: 16px;
}
body {
background: ${theme.color.background};
overflow-y: scroll;
}
a {
text-decoration: none;
color: inherit;
}
h1,
h2,
h3,
h4,
h5,
figure,
figcaption,
li,
ul,
ol,
blockquote {
margin: 0;
padding: 0;
margin-top: 0px;
list-style: none;
line-height: initial;
}
h1 {
font-size: 1.8em;
}
h2 {
font-weight: 700;
font-size: 1.5em;
}
h3 {
font-size: 1.2em;
font-weight: 600;
}
p {
font-size: 1em;
}
button {
padding: 0;
margin: 0;
border: none;
background: none;
color: inherit;
text-align: inherit;
box-sizing: inherit;
cursor: pointer;
font: inherit;
-webkit-appearance: none;
-moz-appearance: none;
appearance: none;
}
input,
label,
select,
button,
textarea {
margin: 0;
border: 0;
padding: 0;
display: inline-block;
vertical-align: middle;
white-space: normal;
background: none;
line-height: 1;
}
`; | * { | random_line_split |
test.js | 'use strict';
module.exports = {
db: 'mongodb://localhost/angle-test',
port: 3001,
app: {
title: 'Angle - Test Environment' | clientSecret: process.env.FACEBOOK_SECRET || 'APP_SECRET',
callbackURL: '/auth/facebook/callback'
},
twitter: {
clientID: process.env.TWITTER_KEY || 'CONSUMER_KEY',
clientSecret: process.env.TWITTER_SECRET || 'CONSUMER_SECRET',
callbackURL: '/auth/twitter/callback'
},
google: {
clientID: process.env.GOOGLE_ID || 'APP_ID',
clientSecret: process.env.GOOGLE_SECRET || 'APP_SECRET',
callbackURL: '/auth/google/callback'
},
linkedin: {
clientID: process.env.LINKEDIN_ID || 'APP_ID',
clientSecret: process.env.LINKEDIN_SECRET || 'APP_SECRET',
callbackURL: '/auth/linkedin/callback'
},
github: {
clientID: process.env.GITHUB_ID || 'APP_ID',
clientSecret: process.env.GITHUB_SECRET || 'APP_SECRET',
callbackURL: '/auth/github/callback'
},
mailer: {
from: process.env.MAILER_FROM || 'MAILER_FROM',
options: {
service: process.env.MAILER_SERVICE_PROVIDER || 'MAILER_SERVICE_PROVIDER',
auth: {
user: process.env.MAILER_EMAIL_ID || 'MAILER_EMAIL_ID',
pass: process.env.MAILER_PASSWORD || 'MAILER_PASSWORD'
}
}
}
}; | },
facebook: {
clientID: process.env.FACEBOOK_ID || 'APP_ID', | random_line_split |
chorus.js | import {createCustomOsc} from './oscillator'
export function createChorus(ctx: Object) {
const merger = ctx.createChannelMerger(2)
const input = ctx.createGain()
const output = ctx.createGain()
const feedbackL = ctx.createGain()
const feedbackR = ctx.createGain()
const delayL = ctx.createDelay()
const delayR = ctx.createDelay()
const lfoLGain = ctx.createGain()
const lfoRGain = ctx.createGain()
const lfoL = createCustomOsc(ctx)
const lfoR = createCustomOsc(ctx)
input.connect(output)
input.connect(delayL)
input.connect(delayR)
lfoL.connect(lfoLGain)
lfoL.start(0)
lfoR.connect(lfoRGain)
lfoR.start(0)
lfoRGain.connect(delayR.delayTime)
lfoLGain.connect(delayL.delayTime)
lfoLGain.connect(lfoR.frequency)
delayL.connect(feedbackL)
delayR.connect(feedbackR)
feedbackL.connect(delayL)
feedbackR.connect(delayR)
feedbackL.connect(merger, 0, 0)
feedbackR.connect(merger, 0, 1)
merger.connect(output)
const setters = {
frequency(value) {
lfoL.frequency.value = value
lfoR.frequency.value = value
},
feedback(value) {
feedbackL.gain.value = value
feedbackR.gain.value = value
}
}
return {
__setters: setters,
input,
connect(node) { output.connect(node) },
init(patch) {
lfoL.init({
frequency: patch.frequency,
type: patch.type,
phase: 180
})
lfoR.init({
frequency: patch.frequency,
type: patch.type,
phase: 0
})
// TODO: Add public access to chorus depth here
input.gain.value = 0.6934
lfoLGain.gain.value = 0.002
lfoRGain.gain.value = 0.002
feedbackL.gain.value = patch.feedback
feedbackR.gain.value = patch.feedback
}, | }
}
} | set(key, val) {
setters[key](val) | random_line_split |
chorus.js | import {createCustomOsc} from './oscillator'
export function createChorus(ctx: Object) {
const merger = ctx.createChannelMerger(2)
const input = ctx.createGain()
const output = ctx.createGain()
const feedbackL = ctx.createGain()
const feedbackR = ctx.createGain()
const delayL = ctx.createDelay()
const delayR = ctx.createDelay()
const lfoLGain = ctx.createGain()
const lfoRGain = ctx.createGain()
const lfoL = createCustomOsc(ctx)
const lfoR = createCustomOsc(ctx)
input.connect(output)
input.connect(delayL)
input.connect(delayR)
lfoL.connect(lfoLGain)
lfoL.start(0)
lfoR.connect(lfoRGain)
lfoR.start(0)
lfoRGain.connect(delayR.delayTime)
lfoLGain.connect(delayL.delayTime)
lfoLGain.connect(lfoR.frequency)
delayL.connect(feedbackL)
delayR.connect(feedbackR)
feedbackL.connect(delayL)
feedbackR.connect(delayR)
feedbackL.connect(merger, 0, 0)
feedbackR.connect(merger, 0, 1)
merger.connect(output)
const setters = {
frequency(value) {
lfoL.frequency.value = value
lfoR.frequency.value = value
},
feedback(value) {
feedbackL.gain.value = value
feedbackR.gain.value = value
}
}
return {
__setters: setters,
input,
| (node) { output.connect(node) },
init(patch) {
lfoL.init({
frequency: patch.frequency,
type: patch.type,
phase: 180
})
lfoR.init({
frequency: patch.frequency,
type: patch.type,
phase: 0
})
// TODO: Add public access to chorus depth here
input.gain.value = 0.6934
lfoLGain.gain.value = 0.002
lfoRGain.gain.value = 0.002
feedbackL.gain.value = patch.feedback
feedbackR.gain.value = patch.feedback
},
set(key, val) {
setters[key](val)
}
}
}
| connect | identifier_name |
chorus.js | import {createCustomOsc} from './oscillator'
export function createChorus(ctx: Object) | {
const merger = ctx.createChannelMerger(2)
const input = ctx.createGain()
const output = ctx.createGain()
const feedbackL = ctx.createGain()
const feedbackR = ctx.createGain()
const delayL = ctx.createDelay()
const delayR = ctx.createDelay()
const lfoLGain = ctx.createGain()
const lfoRGain = ctx.createGain()
const lfoL = createCustomOsc(ctx)
const lfoR = createCustomOsc(ctx)
input.connect(output)
input.connect(delayL)
input.connect(delayR)
lfoL.connect(lfoLGain)
lfoL.start(0)
lfoR.connect(lfoRGain)
lfoR.start(0)
lfoRGain.connect(delayR.delayTime)
lfoLGain.connect(delayL.delayTime)
lfoLGain.connect(lfoR.frequency)
delayL.connect(feedbackL)
delayR.connect(feedbackR)
feedbackL.connect(delayL)
feedbackR.connect(delayR)
feedbackL.connect(merger, 0, 0)
feedbackR.connect(merger, 0, 1)
merger.connect(output)
const setters = {
frequency(value) {
lfoL.frequency.value = value
lfoR.frequency.value = value
},
feedback(value) {
feedbackL.gain.value = value
feedbackR.gain.value = value
}
}
return {
__setters: setters,
input,
connect(node) { output.connect(node) },
init(patch) {
lfoL.init({
frequency: patch.frequency,
type: patch.type,
phase: 180
})
lfoR.init({
frequency: patch.frequency,
type: patch.type,
phase: 0
})
// TODO: Add public access to chorus depth here
input.gain.value = 0.6934
lfoLGain.gain.value = 0.002
lfoRGain.gain.value = 0.002
feedbackL.gain.value = patch.feedback
feedbackR.gain.value = patch.feedback
},
set(key, val) {
setters[key](val)
}
}
} | identifier_body | |
csvportal.py | import arcpy, os, json, csv
from portal import additem, shareItem, generateToken, getUserContent, updateItem, getGroupID, deleteItem, getGroupContent
from metadata import metadata
from ESRImapservice import ESRImapservice
class csvportal(object):
def __init__(self, user, password, portal, worksspace, groups=[]):
"""Connect to portal with username and pasword, also set the local workspace"""
self.user = user
self.password = password
self.portal = portal
self.groups = groups
self.token = generateToken(self.user, self.password, self.portal)
self.groupIDs = [getGroupID(g, self.token, self.portal) for g in self.groups]
if len(self.groupIDs) == 0:
self.userContent = getUserContent(user, '', self.token, self.portal )
else:
self.userContent = getGroupContent(self.groups[0], self.token, self.portal)
self.existingIDs = { n['title'] : n['id'] for n in self.userContent["items"]}
self.LayersFoundinMXD = []
self.ws = worksspace
if worksspace: arcpy.env.workspace = worksspace
def updateToken(self):
"""refresh the token, might be necessary if becomes invalid"""
self.token = generateToken(self.user, self.password, self.portal)
return self.token
def uploadCsv(self, csvpath, sep=";", headerlines=1, nameCol=0, pathCol=1, urlCol=2):
"""upload every row in a csv"""
with open( csvpath , 'rb') as csvfile:
nr = 0
csv_reader = csv.reader(csvfile, dialect=csv.excel, delimiter=sep)
for n in range(headerlines): csv_reader.next()
for row in csv_reader:
line = [unicode(cell, 'latin-1') for cell in row]
name, ds, url = (line[nameCol], line[pathCol], line[urlCol])
if self.ws and os.path.dirname(ds).endswith('.sde'):
ds = os.path.join(self.ws , os.path.basename(ds) )
self.addLyr(ds, name, url, self.groupIDs)
#generate new token every 50 uses
if not nr%50 : self.token = generateToken(self.user, self.password, self.portal)
nr += 1
##TODO: DELETE layers in group and not in csv
def addLyr(self, dataSource, name, serviceUrl, groupIDs=[]):
"""Add *dataSource* to *portal* for *user* , as a item with *name*
representing a layer in *service* """
meta = metadata.metadataFromArcgis( dataSource )
author = meta.credits if len( meta.credits ) else "Stad Antwerpen"
descrip = ( "<strong>"+ meta.title +"</strong> <div><em>"+
meta.orgname + "</em></div> " + meta.description +
"\n<br/> Creatiedatum: " + meta.createDate +
"\n<br/> Publicatiedatum: " + meta.pubDate +
"\n<br/> Revisiedatum: " + meta.reviseDate +
"\n<br/> Beheer: " + meta.contacts +
"\n<br/> Contact: " + meta.eMails )
if name in self.existingIDs.keys():
self.LayersFoundinMXD.append(name)
arcpy.AddMessage( "updating " + name )
item = updateItem(self.user, self.token, self.portal, self.existingIDs[name], serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags))
else:
arcpy.AddMessage( "adding " + name )
item = additem(self.user, self.token, self.portal, serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags) )
if "success" in item.keys() and item["success"]:
id = item["id"]
arcpy.AddMessage( shareItem(id, self.token, self.portal, True, True, groupIDs) )
elif "success" in item.keys() and not item["success"]:
raise Exception( "Error uploading "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result))
def delLyr(self, name):
| if name in self.existingIDs.keys():
result = deleteItem(self.existingIDs[name] , self.token, self.portal, self.user)
if "success" in result.keys() and result["success"]:
arcpy.AddMessage("Deleted layer: " + name )
elif "success" in result.keys() and not result["success"]:
raise Exception( "Error deleting "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result)) | identifier_body | |
csvportal.py | import arcpy, os, json, csv
from portal import additem, shareItem, generateToken, getUserContent, updateItem, getGroupID, deleteItem, getGroupContent
from metadata import metadata
from ESRImapservice import ESRImapservice
class csvportal(object):
def __init__(self, user, password, portal, worksspace, groups=[]):
"""Connect to portal with username and pasword, also set the local workspace"""
self.user = user
self.password = password
self.portal = portal
self.groups = groups
self.token = generateToken(self.user, self.password, self.portal)
self.groupIDs = [getGroupID(g, self.token, self.portal) for g in self.groups]
if len(self.groupIDs) == 0:
self.userContent = getUserContent(user, '', self.token, self.portal )
else:
self.userContent = getGroupContent(self.groups[0], self.token, self.portal)
self.existingIDs = { n['title'] : n['id'] for n in self.userContent["items"]}
self.LayersFoundinMXD = []
self.ws = worksspace
if worksspace: arcpy.env.workspace = worksspace
def updateToken(self):
"""refresh the token, might be necessary if becomes invalid"""
self.token = generateToken(self.user, self.password, self.portal)
return self.token
def uploadCsv(self, csvpath, sep=";", headerlines=1, nameCol=0, pathCol=1, urlCol=2):
"""upload every row in a csv"""
with open( csvpath , 'rb') as csvfile:
nr = 0
csv_reader = csv.reader(csvfile, dialect=csv.excel, delimiter=sep)
for n in range(headerlines): csv_reader.next()
for row in csv_reader:
line = [unicode(cell, 'latin-1') for cell in row]
name, ds, url = (line[nameCol], line[pathCol], line[urlCol])
if self.ws and os.path.dirname(ds).endswith('.sde'):
ds = os.path.join(self.ws , os.path.basename(ds) )
self.addLyr(ds, name, url, self.groupIDs)
#generate new token every 50 uses
if not nr%50 : self.token = generateToken(self.user, self.password, self.portal)
nr += 1
##TODO: DELETE layers in group and not in csv
def addLyr(self, dataSource, name, serviceUrl, groupIDs=[]):
"""Add *dataSource* to *portal* for *user* , as a item with *name*
representing a layer in *service* """
meta = metadata.metadataFromArcgis( dataSource )
author = meta.credits if len( meta.credits ) else "Stad Antwerpen"
descrip = ( "<strong>"+ meta.title +"</strong> <div><em>"+
meta.orgname + "</em></div> " + meta.description +
"\n<br/> Creatiedatum: " + meta.createDate +
"\n<br/> Publicatiedatum: " + meta.pubDate +
"\n<br/> Revisiedatum: " + meta.reviseDate +
"\n<br/> Beheer: " + meta.contacts +
"\n<br/> Contact: " + meta.eMails )
if name in self.existingIDs.keys():
self.LayersFoundinMXD.append(name)
arcpy.AddMessage( "updating " + name )
item = updateItem(self.user, self.token, self.portal, self.existingIDs[name], serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags))
else:
arcpy.AddMessage( "adding " + name )
item = additem(self.user, self.token, self.portal, serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags) )
if "success" in item.keys() and item["success"]:
id = item["id"]
arcpy.AddMessage( shareItem(id, self.token, self.portal, True, True, groupIDs) )
elif "success" in item.keys() and not item["success"]:
raise Exception( "Error uploading "+ name +" "+ json.dumps(result))
else:
|
def delLyr(self, name):
if name in self.existingIDs.keys():
result = deleteItem(self.existingIDs[name] , self.token, self.portal, self.user)
if "success" in result.keys() and result["success"]:
arcpy.AddMessage("Deleted layer: " + name )
elif "success" in result.keys() and not result["success"]:
raise Exception( "Error deleting "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result)) | arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result)) | conditional_block |
csvportal.py | from ESRImapservice import ESRImapservice
class csvportal(object):
def __init__(self, user, password, portal, worksspace, groups=[]):
"""Connect to portal with username and pasword, also set the local workspace"""
self.user = user
self.password = password
self.portal = portal
self.groups = groups
self.token = generateToken(self.user, self.password, self.portal)
self.groupIDs = [getGroupID(g, self.token, self.portal) for g in self.groups]
if len(self.groupIDs) == 0:
self.userContent = getUserContent(user, '', self.token, self.portal )
else:
self.userContent = getGroupContent(self.groups[0], self.token, self.portal)
self.existingIDs = { n['title'] : n['id'] for n in self.userContent["items"]}
self.LayersFoundinMXD = []
self.ws = worksspace
if worksspace: arcpy.env.workspace = worksspace
def updateToken(self):
"""refresh the token, might be necessary if becomes invalid"""
self.token = generateToken(self.user, self.password, self.portal)
return self.token
def uploadCsv(self, csvpath, sep=";", headerlines=1, nameCol=0, pathCol=1, urlCol=2):
"""upload every row in a csv"""
with open( csvpath , 'rb') as csvfile:
nr = 0
csv_reader = csv.reader(csvfile, dialect=csv.excel, delimiter=sep)
for n in range(headerlines): csv_reader.next()
for row in csv_reader:
line = [unicode(cell, 'latin-1') for cell in row]
name, ds, url = (line[nameCol], line[pathCol], line[urlCol])
if self.ws and os.path.dirname(ds).endswith('.sde'):
ds = os.path.join(self.ws , os.path.basename(ds) )
self.addLyr(ds, name, url, self.groupIDs)
#generate new token every 50 uses
if not nr%50 : self.token = generateToken(self.user, self.password, self.portal)
nr += 1
##TODO: DELETE layers in group and not in csv
def addLyr(self, dataSource, name, serviceUrl, groupIDs=[]):
"""Add *dataSource* to *portal* for *user* , as a item with *name*
representing a layer in *service* """
meta = metadata.metadataFromArcgis( dataSource )
author = meta.credits if len( meta.credits ) else "Stad Antwerpen"
descrip = ( "<strong>"+ meta.title +"</strong> <div><em>"+
meta.orgname + "</em></div> " + meta.description +
"\n<br/> Creatiedatum: " + meta.createDate +
"\n<br/> Publicatiedatum: " + meta.pubDate +
"\n<br/> Revisiedatum: " + meta.reviseDate +
"\n<br/> Beheer: " + meta.contacts +
"\n<br/> Contact: " + meta.eMails )
if name in self.existingIDs.keys():
self.LayersFoundinMXD.append(name)
arcpy.AddMessage( "updating " + name )
item = updateItem(self.user, self.token, self.portal, self.existingIDs[name], serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags))
else:
arcpy.AddMessage( "adding " + name )
item = additem(self.user, self.token, self.portal, serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags) )
if "success" in item.keys() and item["success"]:
id = item["id"]
arcpy.AddMessage( shareItem(id, self.token, self.portal, True, True, groupIDs) )
elif "success" in item.keys() and not item["success"]:
raise Exception( "Error uploading "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result))
def delLyr(self, name):
if name in self.existingIDs.keys():
result = deleteItem(self.existingIDs[name] , self.token, self.portal, self.user)
if "success" in result.keys() and result["success"]:
arcpy.AddMessage("Deleted layer: " + name )
elif "success" in result.keys() and not result["success"]:
raise Exception( "Error deleting "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result)) | import arcpy, os, json, csv
from portal import additem, shareItem, generateToken, getUserContent, updateItem, getGroupID, deleteItem, getGroupContent
from metadata import metadata
| random_line_split | |
csvportal.py | import arcpy, os, json, csv
from portal import additem, shareItem, generateToken, getUserContent, updateItem, getGroupID, deleteItem, getGroupContent
from metadata import metadata
from ESRImapservice import ESRImapservice
class csvportal(object):
def __init__(self, user, password, portal, worksspace, groups=[]):
"""Connect to portal with username and pasword, also set the local workspace"""
self.user = user
self.password = password
self.portal = portal
self.groups = groups
self.token = generateToken(self.user, self.password, self.portal)
self.groupIDs = [getGroupID(g, self.token, self.portal) for g in self.groups]
if len(self.groupIDs) == 0:
self.userContent = getUserContent(user, '', self.token, self.portal )
else:
self.userContent = getGroupContent(self.groups[0], self.token, self.portal)
self.existingIDs = { n['title'] : n['id'] for n in self.userContent["items"]}
self.LayersFoundinMXD = []
self.ws = worksspace
if worksspace: arcpy.env.workspace = worksspace
def updateToken(self):
"""refresh the token, might be necessary if becomes invalid"""
self.token = generateToken(self.user, self.password, self.portal)
return self.token
def uploadCsv(self, csvpath, sep=";", headerlines=1, nameCol=0, pathCol=1, urlCol=2):
"""upload every row in a csv"""
with open( csvpath , 'rb') as csvfile:
nr = 0
csv_reader = csv.reader(csvfile, dialect=csv.excel, delimiter=sep)
for n in range(headerlines): csv_reader.next()
for row in csv_reader:
line = [unicode(cell, 'latin-1') for cell in row]
name, ds, url = (line[nameCol], line[pathCol], line[urlCol])
if self.ws and os.path.dirname(ds).endswith('.sde'):
ds = os.path.join(self.ws , os.path.basename(ds) )
self.addLyr(ds, name, url, self.groupIDs)
#generate new token every 50 uses
if not nr%50 : self.token = generateToken(self.user, self.password, self.portal)
nr += 1
##TODO: DELETE layers in group and not in csv
def addLyr(self, dataSource, name, serviceUrl, groupIDs=[]):
"""Add *dataSource* to *portal* for *user* , as a item with *name*
representing a layer in *service* """
meta = metadata.metadataFromArcgis( dataSource )
author = meta.credits if len( meta.credits ) else "Stad Antwerpen"
descrip = ( "<strong>"+ meta.title +"</strong> <div><em>"+
meta.orgname + "</em></div> " + meta.description +
"\n<br/> Creatiedatum: " + meta.createDate +
"\n<br/> Publicatiedatum: " + meta.pubDate +
"\n<br/> Revisiedatum: " + meta.reviseDate +
"\n<br/> Beheer: " + meta.contacts +
"\n<br/> Contact: " + meta.eMails )
if name in self.existingIDs.keys():
self.LayersFoundinMXD.append(name)
arcpy.AddMessage( "updating " + name )
item = updateItem(self.user, self.token, self.portal, self.existingIDs[name], serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags))
else:
arcpy.AddMessage( "adding " + name )
item = additem(self.user, self.token, self.portal, serviceUrl,
title=name, summary=meta.purpose, description=descrip, author=author, tags=",".join(meta.tags) )
if "success" in item.keys() and item["success"]:
id = item["id"]
arcpy.AddMessage( shareItem(id, self.token, self.portal, True, True, groupIDs) )
elif "success" in item.keys() and not item["success"]:
raise Exception( "Error uploading "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result))
def | (self, name):
if name in self.existingIDs.keys():
result = deleteItem(self.existingIDs[name] , self.token, self.portal, self.user)
if "success" in result.keys() and result["success"]:
arcpy.AddMessage("Deleted layer: " + name )
elif "success" in result.keys() and not result["success"]:
raise Exception( "Error deleting "+ name +" "+ json.dumps(result))
else:
arcpy.AddMessage("unsure of success for layer "+ name +" "+ json.dumps(result)) | delLyr | identifier_name |
collection.js | /*
* collection
* A collection of posts
*
* If fetch items are specified, then only gets those posts.
* Otherwise, gets the posts specified from a configuration endpoint.
*/
define([
"lodash",
"backbone",
"helpers/urls",
"helpers/types",
"helpers/params",
"components/content/entities/parser",
"module"
], function(_, Backbone, urls, types, params, parser, module) {
// Definition of a post collection
var PostCollection = Backbone.Collection.extend({
urlRoot: module.config().urlRoot,
initialize: function(models, options) {
options = options || {};
// preserve any options specified to constructor
this.options = _.extend(this.options || {}, options);
},
// remove fetch items as they become models
_maintainItems: function(model) {
this.options.items = _.reject(this.options.items, function(item) {
return item.object_id == model[types.objectIdType(item.object_id)];
});
if (this.options.items.length === 0) |
},
// merge in additional fetch items after initialize
mergeItems: function(items) {
// remove any new fetch items that are already fetched
items = _.reject(items, function(item) {
return this.get(item.object_id);
}, this);
// create a union of previous fetch items and the new fetch items
this.options.items = _.union(this.options.items, items);
},
url: function() {
var fetchItems = this.options.items;
// if fetch items are specified, get the specific items
// object_ids all have to be homogenous (same types)
if (fetchItems && fetchItems.length > 0) {
var method = types.objectIdType(fetchItems[0].object_id);
var posts = params.collection[method](_.pluck(fetchItems, "object_id"));
// maintain the fetchItems as they are added
this.on("add", this._maintainItems, this);
return urls.normalizeUrlRoot(this.urlRoot) +
"?post_type=any" +
"&"+posts +
"&"+params.meta.custom_fields;
} else {
return module.config().endpoint;
}
},
parse: function(data) {
return parser(data);
}
});
return PostCollection;
}); | {
this.off("add", this.maintainItems, this);
} | conditional_block |
collection.js | /*
* collection
* A collection of posts
*
* If fetch items are specified, then only gets those posts.
* Otherwise, gets the posts specified from a configuration endpoint.
*/
define([
"lodash",
"backbone",
"helpers/urls",
"helpers/types",
"helpers/params",
"components/content/entities/parser",
"module"
], function(_, Backbone, urls, types, params, parser, module) {
// Definition of a post collection
var PostCollection = Backbone.Collection.extend({
urlRoot: module.config().urlRoot,
initialize: function(models, options) {
options = options || {};
// preserve any options specified to constructor
this.options = _.extend(this.options || {}, options);
},
// remove fetch items as they become models
_maintainItems: function(model) {
this.options.items = _.reject(this.options.items, function(item) {
return item.object_id == model[types.objectIdType(item.object_id)];
});
if (this.options.items.length === 0) {
this.off("add", this.maintainItems, this);
}
},
// merge in additional fetch items after initialize
mergeItems: function(items) {
// remove any new fetch items that are already fetched
items = _.reject(items, function(item) {
return this.get(item.object_id);
}, this);
// create a union of previous fetch items and the new fetch items
this.options.items = _.union(this.options.items, items);
},
url: function() {
var fetchItems = this.options.items;
// if fetch items are specified, get the specific items
// object_ids all have to be homogenous (same types)
if (fetchItems && fetchItems.length > 0) {
var method = types.objectIdType(fetchItems[0].object_id);
var posts = params.collection[method](_.pluck(fetchItems, "object_id"));
// maintain the fetchItems as they are added
this.on("add", this._maintainItems, this);
return urls.normalizeUrlRoot(this.urlRoot) +
"?post_type=any" +
"&"+posts +
"&"+params.meta.custom_fields;
} else {
return module.config().endpoint;
}
},
parse: function(data) { | }); | return parser(data);
}
});
return PostCollection; | random_line_split |
cipher.rs | use random::{thread_rng, sample};
use serialize::hex::{FromHex, ToHex};
const BLOCK_SIZE_EXP: u32 = 3; // pow(2, 3) == 8 byte blocks
pub enum Mode {
Encrypt,
Decrypt,
}
// Invokes the helper functions and does its shifting thing
pub fn zombify(mode: Mode, data: &[u8], key: &str) -> Vec<u8> {
let hexed_key = key.as_bytes().to_hex();
let amount = hexed_key.as_bytes()
.iter()
.fold(0u8, |amt, &byte| amt.wrapping_add(byte));
match mode {
Mode::Encrypt => {
// well, this won't be useful since the library is meant to only decrypt files (for now)
let text = data.to_hex().into_bytes();
let stuff = cbc(mode, text);
let shifted_text = shift(&stuff, amount);
xor(&shifted_text, &key)
},
Mode::Decrypt => {
let amount = 0u8.wrapping_sub(amount); // shift by (256 - amount) for the reverse
let shifted_text = xor(data, &key);
let stuff = shift(&shifted_text, amount);
charred(cbc(mode, stuff))
},
}
}
// Hex-decoding function
fn charred(decode: Vec<u8>) -> Vec<u8> {
// Mostly, I try to stick to immutable borrows, but from_utf8() requires Vec<u8>
// An error means that the decryption has failed! (which should be due to wrong keys)
String::from_utf8(decode)
.map_err(|_| ())
.and_then(|hexed_stuff| hexed_stuff.from_hex().map_err(|_| ()))
.unwrap_or(Vec::new())
}
// Shifts the elements by the given amount
fn shift(text: &[u8], amount: u8) -> Vec<u8> {
text.iter() // wrap around the boundary if the sum overflows
.map(|byte| amount.wrapping_add(*byte))
.collect()
}
// Byte-wise XOR
fn xor(text: &[u8], key: &str) -> Vec<u8> {
let key_array = key.as_bytes();
let (text_size, key_size) = (text.len(), key.len());
(0..text_size).map(|i| text[i] ^ key_array[i % key_size]).collect()
}
// CBC mode as a seed to scramble the final ciphertext
fn cbc(mode: Mode, mut data: Vec<u8>) -> Vec<u8> {
let size = 2usize.pow(BLOCK_SIZE_EXP);
// Well, there's no encryption going on here - just some fireworks to introduce randomness
match mode {
Mode::Encrypt => {
let mut cbc_vec: Vec<u8> = sample(&mut thread_rng(), 1..255, size);
// hex the bytes until the vector has the required length (an integral multiple of block size)
for _ in 0..BLOCK_SIZE_EXP {
data = data.to_hex().into_bytes();
}
cbc_vec.extend(&data);
for i in size..(data.len() + size) {
cbc_vec[i] = cbc_vec[i] ^ cbc_vec[i - size];
}
cbc_vec
},
Mode::Decrypt => | ,
}
}
| {
let mut i = data.len() - 1;
while i >= size {
data[i] = data[i] ^ data[i - size];
i -= 1;
}
let mut stuff = data[size..].to_owned();
for _ in 0..BLOCK_SIZE_EXP {
stuff = charred(stuff);
}
stuff
} | conditional_block |
cipher.rs | use random::{thread_rng, sample};
use serialize::hex::{FromHex, ToHex};
const BLOCK_SIZE_EXP: u32 = 3; // pow(2, 3) == 8 byte blocks
pub enum Mode {
Encrypt,
Decrypt,
}
// Invokes the helper functions and does its shifting thing
pub fn zombify(mode: Mode, data: &[u8], key: &str) -> Vec<u8> {
let hexed_key = key.as_bytes().to_hex();
let amount = hexed_key.as_bytes()
.iter()
.fold(0u8, |amt, &byte| amt.wrapping_add(byte));
match mode {
Mode::Encrypt => {
// well, this won't be useful since the library is meant to only decrypt files (for now)
let text = data.to_hex().into_bytes();
let stuff = cbc(mode, text);
let shifted_text = shift(&stuff, amount);
xor(&shifted_text, &key)
},
Mode::Decrypt => {
let amount = 0u8.wrapping_sub(amount); // shift by (256 - amount) for the reverse
let shifted_text = xor(data, &key);
let stuff = shift(&shifted_text, amount);
charred(cbc(mode, stuff))
},
}
}
// Hex-decoding function
fn charred(decode: Vec<u8>) -> Vec<u8> {
// Mostly, I try to stick to immutable borrows, but from_utf8() requires Vec<u8>
// An error means that the decryption has failed! (which should be due to wrong keys)
String::from_utf8(decode)
.map_err(|_| ())
.and_then(|hexed_stuff| hexed_stuff.from_hex().map_err(|_| ()))
.unwrap_or(Vec::new())
}
// Shifts the elements by the given amount
fn shift(text: &[u8], amount: u8) -> Vec<u8> {
text.iter() // wrap around the boundary if the sum overflows
.map(|byte| amount.wrapping_add(*byte))
.collect()
}
// Byte-wise XOR
fn | (text: &[u8], key: &str) -> Vec<u8> {
let key_array = key.as_bytes();
let (text_size, key_size) = (text.len(), key.len());
(0..text_size).map(|i| text[i] ^ key_array[i % key_size]).collect()
}
// CBC mode as a seed to scramble the final ciphertext
fn cbc(mode: Mode, mut data: Vec<u8>) -> Vec<u8> {
let size = 2usize.pow(BLOCK_SIZE_EXP);
// Well, there's no encryption going on here - just some fireworks to introduce randomness
match mode {
Mode::Encrypt => {
let mut cbc_vec: Vec<u8> = sample(&mut thread_rng(), 1..255, size);
// hex the bytes until the vector has the required length (an integral multiple of block size)
for _ in 0..BLOCK_SIZE_EXP {
data = data.to_hex().into_bytes();
}
cbc_vec.extend(&data);
for i in size..(data.len() + size) {
cbc_vec[i] = cbc_vec[i] ^ cbc_vec[i - size];
}
cbc_vec
},
Mode::Decrypt => {
let mut i = data.len() - 1;
while i >= size {
data[i] = data[i] ^ data[i - size];
i -= 1;
}
let mut stuff = data[size..].to_owned();
for _ in 0..BLOCK_SIZE_EXP {
stuff = charred(stuff);
}
stuff
},
}
}
| xor | identifier_name |
cipher.rs | use random::{thread_rng, sample};
use serialize::hex::{FromHex, ToHex};
const BLOCK_SIZE_EXP: u32 = 3; // pow(2, 3) == 8 byte blocks
pub enum Mode {
Encrypt,
Decrypt,
}
// Invokes the helper functions and does its shifting thing
//
// Public entry point running the whole pipeline in either direction:
//   Encrypt: hex-encode -> cbc scramble -> byte shift -> XOR with key
//   Decrypt: XOR with key -> reverse shift -> cbc unscramble -> hex-decode
// The shift amount is derived deterministically from the hexed key, so the
// same key always produces the same, reversible rotation.
pub fn zombify(mode: Mode, data: &[u8], key: &str) -> Vec<u8> {
    // fold the hexed key down to one byte (wrapping sum) = rotation amount
    let hexed_key = key.as_bytes().to_hex();
    let amount = hexed_key.as_bytes()
        .iter()
        .fold(0u8, |amt, &byte| amt.wrapping_add(byte));
    match mode {
        Mode::Encrypt => {
            // well, this won't be useful since the library is meant to only decrypt files (for now)
            let text = data.to_hex().into_bytes();
            let stuff = cbc(mode, text);
            let shifted_text = shift(&stuff, amount);
            xor(&shifted_text, &key)
        },
        Mode::Decrypt => {
            let amount = 0u8.wrapping_sub(amount); // shift by (256 - amount) for the reverse
            let shifted_text = xor(data, &key);
            let stuff = shift(&shifted_text, amount);
            charred(cbc(mode, stuff))
        },
    }
}
// Hex-decoding function: converts hex-encoded bytes back into raw bytes.
// An undecodable input (not UTF-8, or not valid hex) signals a failed
// decryption -- typically a wrong key -- and yields an empty vector.
fn charred(decode: Vec<u8>) -> Vec<u8> {
    // from_utf8() takes ownership, hence the by-value Vec parameter.
    match String::from_utf8(decode) {
        Ok(hexed) => match hexed.from_hex() {
            Ok(bytes) => bytes,
            Err(_) => Vec::new(),
        },
        Err(_) => Vec::new(),
    }
}
// Adds `amount` to every byte, wrapping around on overflow (rotation
// mod 256); shifting again by (256 - amount) undoes it.
fn shift(text: &[u8], amount: u8) -> Vec<u8> {
    let mut shifted = Vec::with_capacity(text.len());
    for &byte in text {
        shifted.push(byte.wrapping_add(amount));
    }
    shifted
}
// Byte-wise XOR of `text` against the key bytes, cycling through the key
// as often as needed. XOR is its own inverse, so one function serves both
// directions. Panics if `key` is empty (index modulo zero).
fn xor(text: &[u8], key: &str) -> Vec<u8> {
    let key_bytes = key.as_bytes();
    text.iter()
        .enumerate()
        .map(|(i, &byte)| byte ^ key_bytes[i % key_bytes.len()])
        .collect()
}
// CBC mode as a seed to scramble the final ciphertext
fn cbc(mode: Mode, mut data: Vec<u8>) -> Vec<u8> {
let size = 2usize.pow(BLOCK_SIZE_EXP); | match mode {
Mode::Encrypt => {
let mut cbc_vec: Vec<u8> = sample(&mut thread_rng(), 1..255, size);
// hex the bytes until the vector has the required length (an integral multiple of block size)
for _ in 0..BLOCK_SIZE_EXP {
data = data.to_hex().into_bytes();
}
cbc_vec.extend(&data);
for i in size..(data.len() + size) {
cbc_vec[i] = cbc_vec[i] ^ cbc_vec[i - size];
}
cbc_vec
},
Mode::Decrypt => {
let mut i = data.len() - 1;
while i >= size {
data[i] = data[i] ^ data[i - size];
i -= 1;
}
let mut stuff = data[size..].to_owned();
for _ in 0..BLOCK_SIZE_EXP {
stuff = charred(stuff);
}
stuff
},
}
} | // Well, there's no encryption going on here - just some fireworks to introduce randomness | random_line_split |
ground_station_base.py |
import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
    """Send a mobile-originated message to the modem via e-mail.

    The raw message bytes are written to a .sbd attachment and mailed
    through the configured SMTP account; the subject line carries the
    target modem's IMEI, which is how the gateway routes the message.
    """
    # The module-level settings (user, recipient, password,
    # outgoing_server, imei) are only read here, so no `global`
    # declarations are needed (the old `global email` referred to a
    # variable that does not exist).
    body = ''
    # '%s' works whether imei is the int default or the string taken
    # from the command line; '%d' raised TypeError on a string.
    subject = '%s' % imei
    # message is included as an attachment; `with` guarantees the file
    # is closed even if the write fails.
    attachment = 'msg.sbd'
    with open(attachment, 'wb') as fd:
        fd.write(msg)
    sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
    """Write one log line to stdout (placeholder until file logging exists)."""
    # NOTE(review): the parameter name shadows the stdlib `string` module
    # imported at the top of the file.
    print string
    #TODO logic for text logging
def parse_text_report_no_fix(report):
    """Parse a telemetry report that carries no GPS fix.

    The payload looks like '<header>:<int_temp>,<ext_temp>'; only the two
    temperature fields are extracted and logged.
    """
    payload = report.split(":")[1]
    fields = payload.split(",")
    int_temp, ext_temp = float(fields[0]), float(fields[1])
    # temperatures above 100 are treated as sensor glitches
    if int_temp > 100.0 or ext_temp > 100.0:
        log("Probable invalid temperature readings.")
    else:
        log("Internal Temp:%.1f External Temp:%.1f" % (int_temp, ext_temp))
def send_aprs_packet(position):
    """Report a position to the APRS-IS network over TCP.

    position is [time_str, lat, lon, alt, kts, crs] as produced by
    parse_text_report; only lat/lon are currently used -- altitude,
    speed and course are hard-coded placeholders.
    """
    global aprs_callsign
    # create socket & connect to server
    sSock = socket(AF_INET, SOCK_STREAM)
    sSock.connect((aprs_server, aprs_port))
    # logon
    sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
    # get position information and encode string
    lat = position[1]
    lon = position[2]
    # placeholders until the real telemetry values are wired through
    alt = 100
    kts = 0.1
    crs = 30
    # latitude as ddmm.mm
    # NOTE(review): "%02d" does not use abs(lat), so a southern-hemisphere
    # latitude renders with a minus sign before the hemisphere letter --
    # confirm against the APRS position-report format.
    lat_str = "=%02d" % ( lat ) + "%05.2f" % ( ( abs(lat) % 1 ) * 60.0 )
    if lat > 0:
        lat_str += "N"
    else:
        lat_str += "S"
    # longitude as dddmm.mm
    lon_str = "%03d" % ( abs(lon) ) + "%05.2f" % ( ( abs(lon) % 1 ) * 60.0 )
    # Bug fix: the hemisphere letter was chosen from `lat` (copy/paste) and
    # mapped positive to "W"; by convention positive longitude is East and
    # negative is West.
    if lon > 0:
        lon_str += "E"
    else:
        lon_str += "W"
    #combine the two
    position_str = lat_str + "/" + lon_str
    #add course, speed, and altitude
    comment = "O%03d/%03d/A=%06d" % (crs,kts,alt)
    #print aprs_callsign + aprs_address + position_str + comment
    sSock.send(aprs_callsign + aprs_address + position_str + comment +'\n')
    print("Packet sent to APRS: " + time.ctime() )
    # close socket -- must be closed to avoid buffer overflow
    sSock.shutdown(0)
    sSock.close()
def update_position(position):
    """Forward a freshly parsed position to every enabled output
    (currently APRS is the only one)."""
    if not aprs_is_enabled:
        return
    send_aprs_packet(position)
def parse_text_report(report):
report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position)
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
    """Dispatch an incoming SBD message to its parser by the leading
    type byte. Messages with an unknown type byte are silently dropped."""
    msg_type = msg[0]
    if msg_type == MSG_TEXT_REPORT:
        parse_text_report(msg)
    elif msg_type == MSG_TEXT_REPORT_NO_FIX:
        parse_text_report_no_fix(msg)
def email_check_task(name):
#check e-mail for messages
while(1):
#print 'Checking email'
msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
if received_msg:
print "Received Message", msg,"\r"
parse_incoming(msg)
time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
print "Sending position request"
msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
    """Return True when int(s) succeeds, i.e. s represents an integer.

    Also returns False -- instead of raising -- for inputs int() rejects
    with TypeError (e.g. None or a list), so callers can validate any
    value without a try block of their own.
    """
    try:
        int(s)
        return True
    except (ValueError, TypeError):
        return False
def process_cmd(cmd_str):
    """Resolve a CLI command by its (upper-cased) first word among the
    module's definitions and invoke it with the remaining words as args."""
    parts = cmd_str.split(' ')
    name = parts[0].upper()
    args = parts[1:]
    # look the command name up against everything defined at module level
    namespace = globals().copy()
    namespace.update(locals())
    method = namespace.get(name)
    if method:
        method(args)
    else:
        print("Method %s not implemented" % name)
def main():
global user
global recipient
global incoming_server
global outgoing_server
global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
sys.exit()
else:
email_enabled = True
elif options.mode == "HTTP_POST":
print 'Not implemented yet'
sys.exit()
elif options.mode == "IP":
print 'Not implemented yet'
sys.exit()
else:
print "No valid mode specified"
sys.exit()
user = options.user
recipient = options.recipient
incoming_server = options.in_srv
outgoing_server = options.out_srv
password = options.passwd
imei = options.imei
#spawn task to monitor email for incoming messages
thread.start_new_thread ( email_check_task, ( "Thread-1" , ) )
rx_buffer = ''
while(1):
"Enter 'x' to exit"
cmd_str = raw_input("# ")
if cmd_str == 'x':
break
if not cmd_str == '':
|
print "Exiting application."
if __name__ == '__main__':
main()
| process_cmd(cmd_str) | conditional_block |
ground_station_base.py |
import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
global email
global incoming_server
global outgoing_server
global password
global imei
#put together body
body = ''
#subject
subject = '%d' % imei
#message is included as an attachment
attachment = 'msg.sbd'
fd = open(attachment, 'wb')
fd.write(msg)
fd.close()
sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
print string
#TODO logic for text logging
def parse_text_report_no_fix(report):
report = report.split(":")
report = report[1]
report = report.split(",")
int_temp = float(report[0])
ext_temp = float(report[1])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
def send_aprs_packet(position):
global aprs_callsign
#print position
# create socket & connect to server
sSock = socket(AF_INET, SOCK_STREAM)
sSock.connect((aprs_server, aprs_port))
# logon
sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
#get position information and encode string
lat = position[1]
lon = position[2]
alt = 100
kts = 0.1
crs = 30
# deg mm.mm
lat_str = "=%02d" % ( lat ) + "%05.2f" % ( ( abs(lat) % 1 ) * 60.0 )
if lat > 0:
lat_str += "N"
else:
lat_str += "S"
# deg mm.mm
lon_str = "%03d" % ( abs(lon) ) + "%05.2f" % ( ( abs(lon) % 1 ) * 60.0 )
if lat > 0:
lon_str += "W"
else:
lon_str += "E"
#combine the two
position_str = lat_str + "/" + lon_str
#add course, speed, and altitude
comment = "O%03d/%03d/A=%06d" % (crs,kts,alt)
#comment = "-HELP ME"
#print aprs_callsign + aprs_address + position_str + comment
sSock.send(aprs_callsign + aprs_address + position_str + comment +'\n')
print("Packet sent to APRS: " + time.ctime() )
# close socket -- must be closed to avoidbuffer overflow
sSock.shutdown(0)
sSock.close()
def update_position(position):
if aprs_is_enabled:
send_aprs_packet(position)
def parse_text_report(report):
|
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
#TODO: My gawd, this is ugly.. lets do something else?
if msg[0] == MSG_TEXT_REPORT_NO_FIX:
parse_text_report_no_fix(msg)
elif msg[0] == MSG_TEXT_REPORT:
parse_text_report(msg)
def email_check_task(name):
#check e-mail for messages
while(1):
#print 'Checking email'
msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
if received_msg:
print "Received Message", msg,"\r"
parse_incoming(msg)
time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
print "Sending position request"
msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def process_cmd(cmd_str):
#split up the string by space
cmd_args = cmd_str.split(' ')
#caps on CLI input
cmd_args[0] = cmd_args[0].upper()
if(len(cmd_args) > 1):
args = cmd_args[1:]
else:
args = []
possibles = globals().copy()
possibles.update(locals())
method = possibles.get(cmd_args[0])
if not method:
print("Method %s not implemented" % cmd_args[0])
else:
method(args)
def main():
global user
global recipient
global incoming_server
global outgoing_server
global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
sys.exit()
else:
email_enabled = True
elif options.mode == "HTTP_POST":
print 'Not implemented yet'
sys.exit()
elif options.mode == "IP":
print 'Not implemented yet'
sys.exit()
else:
print "No valid mode specified"
sys.exit()
user = options.user
recipient = options.recipient
incoming_server = options.in_srv
outgoing_server = options.out_srv
password = options.passwd
imei = options.imei
#spawn task to monitor email for incoming messages
thread.start_new_thread ( email_check_task, ( "Thread-1" , ) )
rx_buffer = ''
while(1):
"Enter 'x' to exit"
cmd_str = raw_input("# ")
if cmd_str == 'x':
break
if not cmd_str == '':
process_cmd(cmd_str)
print "Exiting application."
if __name__ == '__main__':
main()
| report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position) | identifier_body |
ground_station_base.py | import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
global email
global incoming_server
global outgoing_server
global password
global imei
#put together body
body = ''
#subject
subject = '%d' % imei
#message is included as an attachment
attachment = 'msg.sbd'
fd = open(attachment, 'wb')
fd.write(msg)
fd.close()
sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
print string
#TODO logic for text logging
def parse_text_report_no_fix(report):
report = report.split(":")
report = report[1]
report = report.split(",")
int_temp = float(report[0])
ext_temp = float(report[1])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
def send_aprs_packet(position): |
# create socket & connect to server
sSock = socket(AF_INET, SOCK_STREAM)
sSock.connect((aprs_server, aprs_port))
# logon
sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
#get position information and encode string
lat = position[1]
lon = position[2]
alt = 100
kts = 0.1
crs = 30
# deg mm.mm
lat_str = "=%02d" % ( lat ) + "%05.2f" % ( ( abs(lat) % 1 ) * 60.0 )
if lat > 0:
lat_str += "N"
else:
lat_str += "S"
# deg mm.mm
lon_str = "%03d" % ( abs(lon) ) + "%05.2f" % ( ( abs(lon) % 1 ) * 60.0 )
if lat > 0:
lon_str += "W"
else:
lon_str += "E"
#combine the two
position_str = lat_str + "/" + lon_str
#add course, speed, and altitude
comment = "O%03d/%03d/A=%06d" % (crs,kts,alt)
#comment = "-HELP ME"
#print aprs_callsign + aprs_address + position_str + comment
sSock.send(aprs_callsign + aprs_address + position_str + comment +'\n')
print("Packet sent to APRS: " + time.ctime() )
# close socket -- must be closed to avoidbuffer overflow
sSock.shutdown(0)
sSock.close()
def update_position(position):
if aprs_is_enabled:
send_aprs_packet(position)
def parse_text_report(report):
report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position)
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
#TODO: My gawd, this is ugly.. lets do something else?
if msg[0] == MSG_TEXT_REPORT_NO_FIX:
parse_text_report_no_fix(msg)
elif msg[0] == MSG_TEXT_REPORT:
parse_text_report(msg)
def email_check_task(name):
#check e-mail for messages
while(1):
#print 'Checking email'
msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
if received_msg:
print "Received Message", msg,"\r"
parse_incoming(msg)
time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
print "Sending position request"
msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def process_cmd(cmd_str):
#split up the string by space
cmd_args = cmd_str.split(' ')
#caps on CLI input
cmd_args[0] = cmd_args[0].upper()
if(len(cmd_args) > 1):
args = cmd_args[1:]
else:
args = []
possibles = globals().copy()
possibles.update(locals())
method = possibles.get(cmd_args[0])
if not method:
print("Method %s not implemented" % cmd_args[0])
else:
method(args)
def main():
global user
global recipient
global incoming_server
global outgoing_server
global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
sys.exit()
else:
email_enabled = True
elif options.mode == "HTTP_POST":
print 'Not implemented yet'
sys.exit()
elif options.mode == "IP":
print 'Not implemented yet'
sys.exit()
else:
print "No valid mode specified"
sys.exit()
user = options.user
recipient = options.recipient
incoming_server = options.in_srv
outgoing_server = options.out_srv
password = options.passwd
imei = options.imei
#spawn task to monitor email for incoming messages
thread.start_new_thread ( email_check_task, ( "Thread-1" , ) )
rx_buffer = ''
while(1):
"Enter 'x' to exit"
cmd_str = raw_input("# ")
if cmd_str == 'x':
break
if not cmd_str == '':
process_cmd(cmd_str)
print "Exiting application."
if __name__ == '__main__':
main() | global aprs_callsign
#print position | random_line_split |
ground_station_base.py |
import os
from optparse import OptionParser
import io
import time
import random
import thread
import sys
from smtp_stuff import sendMail
from imap_stuff import checkMessages
import datetime
import string
import array
from time import gmtime, strftime
from socket import *
user = ''
recipient = ''
incoming_server = ''
outgoing_server = ''
password = ''
imei = 0
aprs_server = 'second.aprs.net'
aprs_port = 20157
aprs_password = ''
aprs_callsign = ''
aprs_address = '>APRS,TCPIP*:'
aprs_is_enabled = False
# comment length is supposed to be 0 to 43 char.
email_enabled = False
ip_enabled = False
http_post_enabled = False
COMMAND_GET_POS = 0
COMMAND_RELEASE = 1
COMMAND_SET_REPORT_INTERVAL = 2
def send_mo_email(msg):
global email
global incoming_server
global outgoing_server
global password
global imei
#put together body
body = ''
#subject
subject = '%d' % imei
#message is included as an attachment
attachment = 'msg.sbd'
fd = open(attachment, 'wb')
fd.write(msg)
fd.close()
sendMail(subject, body, user, recipient, password, outgoing_server, attachment)
def log(string):
print string
#TODO logic for text logging
def parse_text_report_no_fix(report):
report = report.split(":")
report = report[1]
report = report.split(",")
int_temp = float(report[0])
ext_temp = float(report[1])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
def send_aprs_packet(position):
global aprs_callsign
#print position
# create socket & connect to server
sSock = socket(AF_INET, SOCK_STREAM)
sSock.connect((aprs_server, aprs_port))
# logon
sSock.send('user ' + aprs_callsign + ' pass ' + aprs_password + ' vers "' + aprs_callsign + ' Python" \n')
#get position information and encode string
lat = position[1]
lon = position[2]
alt = 100
kts = 0.1
crs = 30
# deg mm.mm
lat_str = "=%02d" % ( lat ) + "%05.2f" % ( ( abs(lat) % 1 ) * 60.0 )
if lat > 0:
lat_str += "N"
else:
lat_str += "S"
# deg mm.mm
lon_str = "%03d" % ( abs(lon) ) + "%05.2f" % ( ( abs(lon) % 1 ) * 60.0 )
if lat > 0:
lon_str += "W"
else:
lon_str += "E"
#combine the two
position_str = lat_str + "/" + lon_str
#add course, speed, and altitude
comment = "O%03d/%03d/A=%06d" % (crs,kts,alt)
#comment = "-HELP ME"
#print aprs_callsign + aprs_address + position_str + comment
sSock.send(aprs_callsign + aprs_address + position_str + comment +'\n')
print("Packet sent to APRS: " + time.ctime() )
# close socket -- must be closed to avoidbuffer overflow
sSock.shutdown(0)
sSock.close()
def update_position(position):
if aprs_is_enabled:
send_aprs_packet(position)
def parse_text_report(report):
report = report.split(":")
report = report[1]
report = report.split(",")
time_str = report[0]
lat = float(report[1])
lon = float(report[2])
alt = float(report[3])
kts = float(report[4])
crs = float(report[5])
position = [time_str,lat,lon,alt,kts,crs]
int_temp = float(report[6])
ext_temp = float(report[7])
if (int_temp > 100.0 or ext_temp > 100.0):
log("Probable invalid temperature readings.")
else:
log("Internal Temp:%.1f External Temp:%.1f" % ( int_temp, ext_temp))
print "Report - Lat:",lat,"Lon:",lon,"Alt(ft):",alt,"Speed(kts):",kts,"Course(deg):",crs
update_position(position)
MSG_TEXT_REPORT = 'U'
MSG_TEXT_REPORT_NO_FIX = 'F'
def parse_incoming(msg):
#TODO: My gawd, this is ugly.. lets do something else?
if msg[0] == MSG_TEXT_REPORT_NO_FIX:
parse_text_report_no_fix(msg)
elif msg[0] == MSG_TEXT_REPORT:
parse_text_report(msg)
def email_check_task(name):
#check e-mail for messages
while(1):
#print 'Checking email'
msg,subject,received_msg,unread_msgs = checkMessages(incoming_server,user,password)
if received_msg:
print "Received Message", msg,"\r"
parse_incoming(msg)
time.sleep(1.0)
def SET_REPORT_INTERVAL(args):
print "Setting reporting interval"
if RepresentsInt(args[0]):
value = int(args[0])
byte1 = ( value >> 8 ) & 0xFF
byte0 = ( value ) & 0xFF
msg = array.array('B',[COMMAND_SET_REPORT_INTERVAL,byte1,byte0])
send_mo_email(msg)
else:
"First argument must be int seconds between 1 - 65532. 0 to disable automatic reporting."
def GET_POS(args):
print "Sending position request"
msg = array.array('B',[COMMAND_GET_POS,1,2,3]) #extra bytes for not good reason
send_mo_email(msg)
def RELEASE(args):
print "Sending ballast release command"
if RepresentsInt(args[0]):
msg = array.array('B',[COMMAND_RELEASE,int(args[0])])
print msg
send_mo_email(msg)
else:
"First argument must be int"
def RepresentsInt(s):
try:
int(s)
return True
except ValueError:
return False
def process_cmd(cmd_str):
#split up the string by space
cmd_args = cmd_str.split(' ')
#caps on CLI input
cmd_args[0] = cmd_args[0].upper()
if(len(cmd_args) > 1):
args = cmd_args[1:]
else:
args = []
possibles = globals().copy()
possibles.update(locals())
method = possibles.get(cmd_args[0])
if not method:
print("Method %s not implemented" % cmd_args[0])
else:
method(args)
def | ():
global user
global recipient
global incoming_server
global outgoing_server
global password
global email_enabled
global ip_enabled
global http_post_enabled
global aprs_server
global aprs_port
global aprs_password
global aprs_callsign
global aprs_is_enabled
parser = OptionParser()
parser.add_option("-p", "--passwd", dest="passwd", action="store", help="Password", metavar="PASSWD")
parser.add_option("-u", "--user", dest="user", action="store", help="E-mail account username", metavar="USER")
parser.add_option("-r", "--recipient", dest="recipient", action="store", help="Destination e-mail address.", metavar="USER")
parser.add_option("-i", "--in_srv", dest="in_srv", action="store", help="Incoming e-mail server url", metavar="IN_SRV")
parser.add_option("-o", "--out_srv", dest="out_srv", action="store", help="Outoging e-mail server", metavar="OUT_SRV")
parser.add_option("-m", "--mode", dest="mode", action="store", help="Mode: EMAIL,HTTP_POST,IP,NONE", default="NONE", metavar="MODE")
parser.add_option("-I", "--imei", dest="imei",action="store",help="IMEI of target modem.",metavar="IMEI")
parser.add_option("-A", "--aprs-server",dest="aprs_server",action="store",help="APRS server",metavar="APRS_SERVER")
parser.add_option("-a", "--aprs-port",dest="aprs_port",action="store",help="APRS port",metavar="APRS_PORT")
parser.add_option("-s", "--aprs-password",dest="aprs_password",action="store",help="APRS password",metavar="APRS_PASSWORD")
parser.add_option("-c", "--aprs-callsign",dest="aprs_callsign",action="store",help="APRS Callsign",metavar="APRS_CALLSIGN")
(options, args) = parser.parse_args()
if options.aprs_server:
aprs_server = options.aprs_server
if options.aprs_port:
aprs_port = options.aprs_port
if options.aprs_password:
aprs_password = options.aprs_password
aprs_is_enabled = True
if options.aprs_callsign:
aprs_callsign = options.aprs_callsign
#check for valid arguments
if options.mode == "EMAIL":
if options.passwd is None or options.user is None or options.recipient is None or options.in_srv is None or options.out_srv is None:
print 'If you want to use e-mail, you must specify in/out servers, user, password, and recipient address.'
sys.exit()
else:
email_enabled = True
elif options.mode == "HTTP_POST":
print 'Not implemented yet'
sys.exit()
elif options.mode == "IP":
print 'Not implemented yet'
sys.exit()
else:
print "No valid mode specified"
sys.exit()
user = options.user
recipient = options.recipient
incoming_server = options.in_srv
outgoing_server = options.out_srv
password = options.passwd
imei = options.imei
#spawn task to monitor email for incoming messages
thread.start_new_thread ( email_check_task, ( "Thread-1" , ) )
rx_buffer = ''
while(1):
"Enter 'x' to exit"
cmd_str = raw_input("# ")
if cmd_str == 'x':
break
if not cmd_str == '':
process_cmd(cmd_str)
print "Exiting application."
if __name__ == '__main__':
main()
| main | identifier_name |
isInstanceOf.js | /**
* @name isInstanceOf
* @description Checks if given flair class/struct instance is an instance of given class/struct type or
* if given class instance implements given interface or has given mixin mixed somewhere in class
* hierarchy
* @example
* isInstanceOf(obj, type)
* @params
* obj: object - flair object instance that needs to be checked
* Type: flair type of string
* @returns {boolean} - true/false
*/
const _isInstanceOf = (obj, Type) => {
// NOTE: in all 'check' type functions, Args() is not to be used, as Args use them itself
let _objType = _typeOf(obj),
_typeType = _typeOf(Type),
isMatched = false;
if (flairInstances.indexOf(_objType) === -1) { throw _Exception.InvalidArgument('obj', _isInstanceOf); } | switch(_typeType) {
case 'class':
isMatched = objMeta.isInstanceOf(Type);
if (!isMatched) {
isMatched = objMeta.Type[meta].isDerivedFrom(Type);
}
break;
case 'struct':
isMatched = objMeta.isInstanceOf(Type); break;
case 'interface':
isMatched = objMeta.isImplements(Type); break;
case 'mixin':
isMatched = objMeta.isMixed(Type); break;
case 'string':
isMatched = objMeta.isInstanceOf(Type);
if (!isMatched && typeof objMeta.isImplements === 'function') { isMatched = objMeta.isImplements(Type); }
if (!isMatched && typeof objMeta.isMixed === 'function') { isMatched = objMeta.isMixed(Type); }
break;
}
// return
return isMatched;
};
// attach to flair
a2f('isInstanceOf', _isInstanceOf); | if (flairTypes.indexOf(_typeType) === -1 && _typeType !== 'string') { throw _Exception.InvalidArgument('Type', _isInstanceOf); }
let objMeta = obj[meta]; | random_line_split |
isInstanceOf.js | /**
* @name isInstanceOf
* @description Checks if given flair class/struct instance is an instance of given class/struct type or
* if given class instance implements given interface or has given mixin mixed somewhere in class
* hierarchy
* @example
* isInstanceOf(obj, type)
* @params
* obj: object - flair object instance that needs to be checked
* Type: flair type of string
* @returns {boolean} - true/false
*/
const _isInstanceOf = (obj, Type) => {
// NOTE: in all 'check' type functions, Args() is not to be used, as Args use them itself
let _objType = _typeOf(obj),
_typeType = _typeOf(Type),
isMatched = false;
if (flairInstances.indexOf(_objType) === -1) { throw _Exception.InvalidArgument('obj', _isInstanceOf); }
if (flairTypes.indexOf(_typeType) === -1 && _typeType !== 'string') { throw _Exception.InvalidArgument('Type', _isInstanceOf); }
let objMeta = obj[meta];
switch(_typeType) {
case 'class':
isMatched = objMeta.isInstanceOf(Type);
if (!isMatched) {
isMatched = objMeta.Type[meta].isDerivedFrom(Type);
}
break;
case 'struct':
isMatched = objMeta.isInstanceOf(Type); break;
case 'interface':
isMatched = objMeta.isImplements(Type); break;
case 'mixin':
isMatched = objMeta.isMixed(Type); break;
case 'string':
isMatched = objMeta.isInstanceOf(Type);
if (!isMatched && typeof objMeta.isImplements === 'function') |
if (!isMatched && typeof objMeta.isMixed === 'function') { isMatched = objMeta.isMixed(Type); }
break;
}
// return
return isMatched;
};
// attach to flair
a2f('isInstanceOf', _isInstanceOf);
| { isMatched = objMeta.isImplements(Type); } | conditional_block |
main.ts | import {bootstrap} from '@angular/platform-browser-dynamic';
import {HAMMER_GESTURE_CONFIG} from '@angular/platform-browser';
import {DemoApp} from './app/demo-app';
import {HTTP_PROVIDERS} from '@angular/http';
import {ROUTER_PROVIDERS} from '@angular/router';
import {MdIconRegistry} from './components/icon/icon-registry';
import {OVERLAY_CONTAINER_TOKEN} from './core/overlay/overlay';
import {MdLiveAnnouncer} from './core/live-announcer/live-announcer';
import {provide} from '@angular/core';
import {createOverlayContainer} from './core/overlay/overlay-container';
import {Renderer} from '@angular/core';
import {MdGestureConfig} from './core/gestures/MdGestureConfig';
import 'rxjs/Rx';
bootstrap(DemoApp, [
ROUTER_PROVIDERS,
MdLiveAnnouncer,
provide(OVERLAY_CONTAINER_TOKEN, {useValue: createOverlayContainer()}),
HTTP_PROVIDERS,
MdIconRegistry, | ]); | Renderer,
provide(HAMMER_GESTURE_CONFIG, {useClass: MdGestureConfig}) | random_line_split |
test_innit.py | from recipe_scrapers.innit import Innit
from tests import ScraperTest
class TestInnitScraper(ScraperTest):
scraper_class = Innit
def test_host(self):
self.assertEqual("innit.com", self.harvester_class.host())
def test_title(self):
self.assertEqual(
"Tofu Mixed Greens Salad with Broccoli Beet Mix & Carrot Ginger Dressing",
self.harvester_class.title(),
)
def test_total_time(self):
self.assertEqual(51, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 serving(s)", self.harvester_class.yields())
def test_ingredients(self):
self.assertEqual(
[
"2 Carrots",
"1 piece Fresh Ginger",
"1/2 Orange",
"1 Tbsp Fresh Chives",
"2 cups Broccoli",
"1 lb Precooked Beets",
"2 Tbsp Italian Parsley",
"4 cups Fresh Spring Mix", | "1 1/3 tsp Kosher Salt",
"2/3 cup Olive Oil",
"2 pinches Black Pepper",
"1/4 cup Rice Wine Vinegar",
"1 cup Sunflower Seeds",
],
self.harvester_class.ingredients(),
)
def test_nutrients(self):
self.assertEqual(
{
"sugarContent": "18 g",
"proteinContent": "32 g",
"fiberContent": "11 g",
"unsaturatedFatContent": "55 g",
"fatContent": "64 g",
"cholesterolContent": "0 mg",
"calories": "830 kcal",
"carbohydrateContent": "34 g",
"saturatedFatContent": "9 g",
"sodiumContent": "1060 mg",
},
self.harvester_class.nutrients(),
)
def test_instructions(self):
self.assertEqual(
"""Preheat
Preheat the oven to 425F.
Line sheet pan with foil.
Sear Tofu
Drain, pat dry & prepare tofu.
Heat pan on high heat for 2 minutes.
Cook for 7 min or until golden brown on all sides, seasoning half way.
Remove from pan.
Bake Broccoli
Toss broccoli with oil & salt.
Bake for 22 minutes.
Simmer Carrots & Ginger
Prepare ingredients.
Pre-heat pan. Add all ingredients; cover with water.
Cook until soft, about 5 minutes.
Blend Dressing Ingredients
Transfer carrot-ginger mixture to blender.
Add orange juice/zest, miso, sesame oil, honey, rice vinegar.
Blend until smooth.
Let cool in fridge for 10 minutes. Fold in chives & season.
Flavor Beets
Toss beets in oil. Season with salt, pepper & parsley.
Toast Sunflower Seeds
Combine ingredients.
Toast in oven for 5 - 7 min or until golden brown.
Mixed Greens
Wash greens & dry.
Serve and Enjoy!
Pair with your favorite music!""",
self.harvester_class.instructions(),
)
def test_image(self):
self.assertEqual(
"https://www.innit.com/meal-service/en-US/images/Meal-Salads%3A%20Blended-Carrot_Ginger_Dressing%2BAssembled-Broccoli_Beet_Mix%2BSeared-Tofu-Diced%2BOlive_Oil%2BPrepared-Mixed_Greens_480x480.png",
self.harvester_class.image(),
) | "2 packages Extra Firm Tofu",
"1 Tbsp Miso Paste",
"1/2 tsp Sesame Seed Oil",
"1/2 Tbsp Honey", | random_line_split |
test_innit.py | from recipe_scrapers.innit import Innit
from tests import ScraperTest
class TestInnitScraper(ScraperTest):
scraper_class = Innit
def test_host(self):
self.assertEqual("innit.com", self.harvester_class.host())
def test_title(self):
self.assertEqual(
"Tofu Mixed Greens Salad with Broccoli Beet Mix & Carrot Ginger Dressing",
self.harvester_class.title(),
)
def test_total_time(self):
self.assertEqual(51, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 serving(s)", self.harvester_class.yields())
def test_ingredients(self):
self.assertEqual(
[
"2 Carrots",
"1 piece Fresh Ginger",
"1/2 Orange",
"1 Tbsp Fresh Chives",
"2 cups Broccoli",
"1 lb Precooked Beets",
"2 Tbsp Italian Parsley",
"4 cups Fresh Spring Mix",
"2 packages Extra Firm Tofu",
"1 Tbsp Miso Paste",
"1/2 tsp Sesame Seed Oil",
"1/2 Tbsp Honey",
"1 1/3 tsp Kosher Salt",
"2/3 cup Olive Oil",
"2 pinches Black Pepper",
"1/4 cup Rice Wine Vinegar",
"1 cup Sunflower Seeds",
],
self.harvester_class.ingredients(),
)
def test_nutrients(self):
self.assertEqual(
{
"sugarContent": "18 g",
"proteinContent": "32 g",
"fiberContent": "11 g",
"unsaturatedFatContent": "55 g",
"fatContent": "64 g",
"cholesterolContent": "0 mg",
"calories": "830 kcal",
"carbohydrateContent": "34 g",
"saturatedFatContent": "9 g",
"sodiumContent": "1060 mg",
},
self.harvester_class.nutrients(),
)
def test_instructions(self):
self.assertEqual(
"""Preheat
Preheat the oven to 425F.
Line sheet pan with foil.
Sear Tofu
Drain, pat dry & prepare tofu.
Heat pan on high heat for 2 minutes.
Cook for 7 min or until golden brown on all sides, seasoning half way.
Remove from pan.
Bake Broccoli
Toss broccoli with oil & salt.
Bake for 22 minutes.
Simmer Carrots & Ginger
Prepare ingredients.
Pre-heat pan. Add all ingredients; cover with water.
Cook until soft, about 5 minutes.
Blend Dressing Ingredients
Transfer carrot-ginger mixture to blender.
Add orange juice/zest, miso, sesame oil, honey, rice vinegar.
Blend until smooth.
Let cool in fridge for 10 minutes. Fold in chives & season.
Flavor Beets
Toss beets in oil. Season with salt, pepper & parsley.
Toast Sunflower Seeds
Combine ingredients.
Toast in oven for 5 - 7 min or until golden brown.
Mixed Greens
Wash greens & dry.
Serve and Enjoy!
Pair with your favorite music!""",
self.harvester_class.instructions(),
)
def | (self):
self.assertEqual(
"https://www.innit.com/meal-service/en-US/images/Meal-Salads%3A%20Blended-Carrot_Ginger_Dressing%2BAssembled-Broccoli_Beet_Mix%2BSeared-Tofu-Diced%2BOlive_Oil%2BPrepared-Mixed_Greens_480x480.png",
self.harvester_class.image(),
)
| test_image | identifier_name |
test_innit.py | from recipe_scrapers.innit import Innit
from tests import ScraperTest
class TestInnitScraper(ScraperTest):
scraper_class = Innit
def test_host(self):
self.assertEqual("innit.com", self.harvester_class.host())
def test_title(self):
self.assertEqual(
"Tofu Mixed Greens Salad with Broccoli Beet Mix & Carrot Ginger Dressing",
self.harvester_class.title(),
)
def test_total_time(self):
self.assertEqual(51, self.harvester_class.total_time())
def test_yields(self):
self.assertEqual("4 serving(s)", self.harvester_class.yields())
def test_ingredients(self):
self.assertEqual(
[
"2 Carrots",
"1 piece Fresh Ginger",
"1/2 Orange",
"1 Tbsp Fresh Chives",
"2 cups Broccoli",
"1 lb Precooked Beets",
"2 Tbsp Italian Parsley",
"4 cups Fresh Spring Mix",
"2 packages Extra Firm Tofu",
"1 Tbsp Miso Paste",
"1/2 tsp Sesame Seed Oil",
"1/2 Tbsp Honey",
"1 1/3 tsp Kosher Salt",
"2/3 cup Olive Oil",
"2 pinches Black Pepper",
"1/4 cup Rice Wine Vinegar",
"1 cup Sunflower Seeds",
],
self.harvester_class.ingredients(),
)
def test_nutrients(self):
|
def test_instructions(self):
self.assertEqual(
"""Preheat
Preheat the oven to 425F.
Line sheet pan with foil.
Sear Tofu
Drain, pat dry & prepare tofu.
Heat pan on high heat for 2 minutes.
Cook for 7 min or until golden brown on all sides, seasoning half way.
Remove from pan.
Bake Broccoli
Toss broccoli with oil & salt.
Bake for 22 minutes.
Simmer Carrots & Ginger
Prepare ingredients.
Pre-heat pan. Add all ingredients; cover with water.
Cook until soft, about 5 minutes.
Blend Dressing Ingredients
Transfer carrot-ginger mixture to blender.
Add orange juice/zest, miso, sesame oil, honey, rice vinegar.
Blend until smooth.
Let cool in fridge for 10 minutes. Fold in chives & season.
Flavor Beets
Toss beets in oil. Season with salt, pepper & parsley.
Toast Sunflower Seeds
Combine ingredients.
Toast in oven for 5 - 7 min or until golden brown.
Mixed Greens
Wash greens & dry.
Serve and Enjoy!
Pair with your favorite music!""",
self.harvester_class.instructions(),
)
def test_image(self):
self.assertEqual(
"https://www.innit.com/meal-service/en-US/images/Meal-Salads%3A%20Blended-Carrot_Ginger_Dressing%2BAssembled-Broccoli_Beet_Mix%2BSeared-Tofu-Diced%2BOlive_Oil%2BPrepared-Mixed_Greens_480x480.png",
self.harvester_class.image(),
)
| self.assertEqual(
{
"sugarContent": "18 g",
"proteinContent": "32 g",
"fiberContent": "11 g",
"unsaturatedFatContent": "55 g",
"fatContent": "64 g",
"cholesterolContent": "0 mg",
"calories": "830 kcal",
"carbohydrateContent": "34 g",
"saturatedFatContent": "9 g",
"sodiumContent": "1060 mg",
},
self.harvester_class.nutrients(),
) | identifier_body |
__init__.py | """
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# this is the endpoint that my new version of this program will
# abuse with possible store ids. this is a much more reliable "darts at the wall"
# technique than the previous location-based one
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# from testing, I have confirmed certain "series" of store IDs
# 0000 series are all old stores in PA, NJ, MD, DE, and VA
# 5000 series are all stores in FL
# 8000 series are all new stores in PA, NJ, MD, DE, and VA
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# currently only tracking these gas types to keep a consistent csv schema.
# other types are not consistent across all wawas
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location):
"""
Breaks open the json for the gas prices
:param in_location: The Wawa location we are looking at (dict)
:return: The gas price info (dict)
"""
out_data = {}
try:
fuel_data = in_location["fuelTypes"]
for ft in fuel_data:
lowered = ft["description"].lower()
if lowered in GAS_TYPES:
out_data[lowered + "_price"] = ft["price"]
# no gas sold at this Wawa
except KeyError:
for gt in GAS_TYPES:
out_data[gt + "_price"] = ""
return out_data
def camel_to_underscore(in_string):
"""
Basic function that converts a camel-cased word to use underscores
:param in_string: The camel-cased string (str)
:return: The underscore'd string (str)
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_amenities(in_location):
"""
Breaks open the json for the amenities offered at the Wawa location
:param in_location: The Wawa location (dict)
:return: The amenity info (dict)
"""
out_data = {}
for amenity, value in in_location["amenities"].items():
out_data["has_" + camel_to_underscore(amenity).lower()] = value
return out_data
def get_addresses(in_location):
"""
Parses info for the Wawa address and coordinates
:param in_location: The Wawa location (dict)
:return: The address and coordincate info (dict)
"""
friendly = in_location["addresses"][0]
physical = in_location["addresses"][1]
out_friendly = {
"address": friendly["address"],
"city": friendly["city"],
"state": friendly["state"],
"zip": friendly["zip"]
}
out_physical = {
"longitude": physical["loc"][1],
"latitude": physical["loc"][0],
}
return {"address": out_friendly, "coordinates": out_physical}
def get_wawa_data(limit=None):
"""
Hits the store number url endpoint to pull down Wawa locations and
parse each one's information. We don't know the store numbers as there
is not list of store numbers. Through testing I was able to narrow down
"series" of store numbers, so we iterate through ranges of possible
store numbers, skipping any 404 errors (invalid store id responses
returned by url calls).
:param limit: A cap on the number of Wawa results returned (int) (optional)
:return: Parsed Wawa information (list<dict>)
"""
ssl._create_default_https_context = ssl._create_unverified_context
output = []
for i in POSSIBLE_STORE_NUMS:
response = requests.get(QUERY_URL, params={"storeNumber": i})
if response.status_code != 404:
|
return output
| location = response.json()
geographic_data = get_addresses(location)
address = geographic_data["address"]
coordinates = geographic_data["coordinates"]
gas_prices = parse_gas_prices(location)
amenities = parse_amenities(location)
this_location_output = {
"has_menu": location["hasMenu"],
"last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
"location_id": location["locationID"],
"open_24_hours": location["open24Hours"],
"regional_director": location["regionalDirector"],
"store_close": location["storeClose"],
"store_name": location["storeName"],
"store_number": location["storeNumber"],
"store_open": location["storeOpen"],
"telephone": location["telephone"]
}
this_location_output = {**this_location_output, **address}
this_location_output = {**this_location_output, **coordinates}
this_location_output = {**this_location_output, **gas_prices}
this_location_output = {**this_location_output, **amenities}
output.append(this_location_output)
if limit and len(output) == limit:
break | conditional_block |
__init__.py | """
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# this is the endpoint that my new version of this program will
# abuse with possible store ids. this is a much more reliable "darts at the wall"
# technique than the previous location-based one
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# from testing, I have confirmed certain "series" of store IDs
# 0000 series are all old stores in PA, NJ, MD, DE, and VA
# 5000 series are all stores in FL
# 8000 series are all new stores in PA, NJ, MD, DE, and VA
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# currently only tracking these gas types to keep a consistent csv schema.
# other types are not consistent across all wawas
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location):
"""
Breaks open the json for the gas prices
:param in_location: The Wawa location we are looking at (dict)
:return: The gas price info (dict)
"""
out_data = {}
try:
fuel_data = in_location["fuelTypes"]
for ft in fuel_data:
lowered = ft["description"].lower()
if lowered in GAS_TYPES:
out_data[lowered + "_price"] = ft["price"]
# no gas sold at this Wawa
except KeyError:
for gt in GAS_TYPES:
out_data[gt + "_price"] = ""
return out_data
def camel_to_underscore(in_string):
"""
Basic function that converts a camel-cased word to use underscores
:param in_string: The camel-cased string (str)
:return: The underscore'd string (str)
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_amenities(in_location):
"""
Breaks open the json for the amenities offered at the Wawa location
:param in_location: The Wawa location (dict)
:return: The amenity info (dict)
"""
out_data = {}
for amenity, value in in_location["amenities"].items():
out_data["has_" + camel_to_underscore(amenity).lower()] = value
return out_data
def get_addresses(in_location):
"""
Parses info for the Wawa address and coordinates
:param in_location: The Wawa location (dict)
:return: The address and coordincate info (dict)
"""
friendly = in_location["addresses"][0]
physical = in_location["addresses"][1]
out_friendly = {
"address": friendly["address"],
"city": friendly["city"],
"state": friendly["state"],
"zip": friendly["zip"]
}
out_physical = {
"longitude": physical["loc"][1],
"latitude": physical["loc"][0],
}
return {"address": out_friendly, "coordinates": out_physical}
def | (limit=None):
"""
Hits the store number url endpoint to pull down Wawa locations and
parse each one's information. We don't know the store numbers as there
is not list of store numbers. Through testing I was able to narrow down
"series" of store numbers, so we iterate through ranges of possible
store numbers, skipping any 404 errors (invalid store id responses
returned by url calls).
:param limit: A cap on the number of Wawa results returned (int) (optional)
:return: Parsed Wawa information (list<dict>)
"""
ssl._create_default_https_context = ssl._create_unverified_context
output = []
for i in POSSIBLE_STORE_NUMS:
response = requests.get(QUERY_URL, params={"storeNumber": i})
if response.status_code != 404:
location = response.json()
geographic_data = get_addresses(location)
address = geographic_data["address"]
coordinates = geographic_data["coordinates"]
gas_prices = parse_gas_prices(location)
amenities = parse_amenities(location)
this_location_output = {
"has_menu": location["hasMenu"],
"last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
"location_id": location["locationID"],
"open_24_hours": location["open24Hours"],
"regional_director": location["regionalDirector"],
"store_close": location["storeClose"],
"store_name": location["storeName"],
"store_number": location["storeNumber"],
"store_open": location["storeOpen"],
"telephone": location["telephone"]
}
this_location_output = {**this_location_output, **address}
this_location_output = {**this_location_output, **coordinates}
this_location_output = {**this_location_output, **gas_prices}
this_location_output = {**this_location_output, **amenities}
output.append(this_location_output)
if limit and len(output) == limit:
break
return output
| get_wawa_data | identifier_name |
__init__.py | """
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# this is the endpoint that my new version of this program will
# abuse with possible store ids. this is a much more reliable "darts at the wall"
# technique than the previous location-based one
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# from testing, I have confirmed certain "series" of store IDs
# 0000 series are all old stores in PA, NJ, MD, DE, and VA
# 5000 series are all stores in FL
# 8000 series are all new stores in PA, NJ, MD, DE, and VA
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# currently only tracking these gas types to keep a consistent csv schema.
# other types are not consistent across all wawas
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location):
"""
Breaks open the json for the gas prices
:param in_location: The Wawa location we are looking at (dict)
:return: The gas price info (dict)
"""
out_data = {}
try:
fuel_data = in_location["fuelTypes"]
for ft in fuel_data:
lowered = ft["description"].lower()
if lowered in GAS_TYPES:
out_data[lowered + "_price"] = ft["price"]
# no gas sold at this Wawa
except KeyError:
for gt in GAS_TYPES:
out_data[gt + "_price"] = ""
return out_data
def camel_to_underscore(in_string):
"""
Basic function that converts a camel-cased word to use underscores
:param in_string: The camel-cased string (str)
:return: The underscore'd string (str)
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_amenities(in_location):
"""
Breaks open the json for the amenities offered at the Wawa location
:param in_location: The Wawa location (dict)
:return: The amenity info (dict)
"""
out_data = {}
for amenity, value in in_location["amenities"].items():
out_data["has_" + camel_to_underscore(amenity).lower()] = value
return out_data
def get_addresses(in_location):
"""
Parses info for the Wawa address and coordinates
:param in_location: The Wawa location (dict)
:return: The address and coordincate info (dict)
"""
friendly = in_location["addresses"][0]
physical = in_location["addresses"][1]
out_friendly = {
"address": friendly["address"],
"city": friendly["city"],
"state": friendly["state"],
"zip": friendly["zip"]
}
out_physical = {
"longitude": physical["loc"][1],
"latitude": physical["loc"][0],
}
return {"address": out_friendly, "coordinates": out_physical}
def get_wawa_data(limit=None):
"""
Hits the store number url endpoint to pull down Wawa locations and
parse each one's information. We don't know the store numbers as there
is not list of store numbers. Through testing I was able to narrow down
"series" of store numbers, so we iterate through ranges of possible
store numbers, skipping any 404 errors (invalid store id responses
returned by url calls).
:param limit: A cap on the number of Wawa results returned (int) (optional)
:return: Parsed Wawa information (list<dict>)
"""
ssl._create_default_https_context = ssl._create_unverified_context
output = []
for i in POSSIBLE_STORE_NUMS: | if response.status_code != 404:
location = response.json()
geographic_data = get_addresses(location)
address = geographic_data["address"]
coordinates = geographic_data["coordinates"]
gas_prices = parse_gas_prices(location)
amenities = parse_amenities(location)
this_location_output = {
"has_menu": location["hasMenu"],
"last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
"location_id": location["locationID"],
"open_24_hours": location["open24Hours"],
"regional_director": location["regionalDirector"],
"store_close": location["storeClose"],
"store_name": location["storeName"],
"store_number": location["storeNumber"],
"store_open": location["storeOpen"],
"telephone": location["telephone"]
}
this_location_output = {**this_location_output, **address}
this_location_output = {**this_location_output, **coordinates}
this_location_output = {**this_location_output, **gas_prices}
this_location_output = {**this_location_output, **amenities}
output.append(this_location_output)
if limit and len(output) == limit:
break
return output | response = requests.get(QUERY_URL, params={"storeNumber": i})
| random_line_split |
__init__.py | """
scraping
the utility functions for the actual web scraping
"""
import ssl
import datetime
import requests
import re
# this is the endpoint that my new version of this program will
# abuse with possible store ids. this is a much more reliable "darts at the wall"
# technique than the previous location-based one
QUERY_URL = "https://www.wawa.com/Handlers/LocationByStoreNumber.ashx"
# from testing, I have confirmed certain "series" of store IDs
# 0000 series are all old stores in PA, NJ, MD, DE, and VA
# 5000 series are all stores in FL
# 8000 series are all new stores in PA, NJ, MD, DE, and VA
POSSIBLE_STORE_NUMS = list(range(5000, 6000))
POSSIBLE_STORE_NUMS.extend(list(range(0, 1000)))
POSSIBLE_STORE_NUMS.extend(list(range(8000, 9000)))
# currently only tracking these gas types to keep a consistent csv schema.
# other types are not consistent across all wawas
GAS_TYPES = ["diesel", "plus", "unleaded", "premium"]
def parse_gas_prices(in_location):
|
def camel_to_underscore(in_string):
"""
Basic function that converts a camel-cased word to use underscores
:param in_string: The camel-cased string (str)
:return: The underscore'd string (str)
"""
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', in_string)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_amenities(in_location):
"""
Breaks open the json for the amenities offered at the Wawa location
:param in_location: The Wawa location (dict)
:return: The amenity info (dict)
"""
out_data = {}
for amenity, value in in_location["amenities"].items():
out_data["has_" + camel_to_underscore(amenity).lower()] = value
return out_data
def get_addresses(in_location):
"""
Parses info for the Wawa address and coordinates
:param in_location: The Wawa location (dict)
:return: The address and coordincate info (dict)
"""
friendly = in_location["addresses"][0]
physical = in_location["addresses"][1]
out_friendly = {
"address": friendly["address"],
"city": friendly["city"],
"state": friendly["state"],
"zip": friendly["zip"]
}
out_physical = {
"longitude": physical["loc"][1],
"latitude": physical["loc"][0],
}
return {"address": out_friendly, "coordinates": out_physical}
def get_wawa_data(limit=None):
"""
Hits the store number url endpoint to pull down Wawa locations and
parse each one's information. We don't know the store numbers as there
is not list of store numbers. Through testing I was able to narrow down
"series" of store numbers, so we iterate through ranges of possible
store numbers, skipping any 404 errors (invalid store id responses
returned by url calls).
:param limit: A cap on the number of Wawa results returned (int) (optional)
:return: Parsed Wawa information (list<dict>)
"""
ssl._create_default_https_context = ssl._create_unverified_context
output = []
for i in POSSIBLE_STORE_NUMS:
response = requests.get(QUERY_URL, params={"storeNumber": i})
if response.status_code != 404:
location = response.json()
geographic_data = get_addresses(location)
address = geographic_data["address"]
coordinates = geographic_data["coordinates"]
gas_prices = parse_gas_prices(location)
amenities = parse_amenities(location)
this_location_output = {
"has_menu": location["hasMenu"],
"last_updated": datetime.datetime.strptime(location["lastUpdated"], "%m/%d/%Y %I:%M %p"),
"location_id": location["locationID"],
"open_24_hours": location["open24Hours"],
"regional_director": location["regionalDirector"],
"store_close": location["storeClose"],
"store_name": location["storeName"],
"store_number": location["storeNumber"],
"store_open": location["storeOpen"],
"telephone": location["telephone"]
}
this_location_output = {**this_location_output, **address}
this_location_output = {**this_location_output, **coordinates}
this_location_output = {**this_location_output, **gas_prices}
this_location_output = {**this_location_output, **amenities}
output.append(this_location_output)
if limit and len(output) == limit:
break
return output
| """
Breaks open the json for the gas prices
:param in_location: The Wawa location we are looking at (dict)
:return: The gas price info (dict)
"""
out_data = {}
try:
fuel_data = in_location["fuelTypes"]
for ft in fuel_data:
lowered = ft["description"].lower()
if lowered in GAS_TYPES:
out_data[lowered + "_price"] = ft["price"]
# no gas sold at this Wawa
except KeyError:
for gt in GAS_TYPES:
out_data[gt + "_price"] = ""
return out_data | identifier_body |
activitybar.ts | import { loadstyle } from "../../load";
import * as path from 'path';
import { dom, quickDom, emptyDom } from '../../dom/dom'
import { component } from '../component'
export class activity {
public name: string;
public label: dom;
public isActive: boolean;
public workbench: dom;
constructor(name: string) {
this.name = name;
this.isActive = false;
}
}
console.log(path.join(__dirname, './media/activitybar.css'));
loadstyle(path.join(__dirname, './media/activitybar.css'));
export class activitybar extends component {
private activityList: dom;
private activities: activity[];
constructor(
parent: dom
) {
super();
if (!parent) {
throw new Error('Invalid call to activitybar');
}
this.container = emptyDom().element('div', 'activitybar');
this.container.apendTo(parent);
this.activities = new Array();
// Add containers for activity
this.createActivity();
}
createActivity() {
let activityparent = emptyDom().element('div', 'activity-container');
activityparent.apendTo(this.container);
this.activityList = emptyDom().element('ul', 'activity-list');
this.activityList.apendTo(activityparent);
}
updateStyle() {
// style update for statusbar
super.updateStyle();
}
addActivity(name: string, workbench: dom | undefined, context: any, fn: (act: activity, context: any) => void) {
let act = new activity(name);
var item = emptyDom().element('li', 'activity-item');
item.apendTo(this.activityList);
act.label = emptyDom().element('a', 'activity-label');
act.label.apendTo(item);
act.label.addClass(act.name);
act.label.title(act.name);
if (typeof workbench !== 'undefined') |
item.on('mouseover', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseout', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('mousedown', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseup', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('click', (e: Event) => {
e.preventDefault();
for (let i = 0; i < this.activities.length; i++) {
this.activities[i].label.getHTMLElement().classList.remove('active');
if (typeof this.activities[i].workbench !== 'undefined') {
this.activities[i].workbench.addClass('hide');
}
this.activities[i].isActive = false;
}
act.label.addClass('active');
act.isActive = true;
if (typeof act.workbench !== 'undefined') {
act.workbench.removeClass('hide');
}
console.log('Invoking the registered workbench');
console.log(act);
//after this call the registered callback
fn(act, context);
})
this.activities.push(act);
}
} | {
act.workbench = workbench;
act.workbench.addClass('hide');
} | conditional_block |
activitybar.ts | import { loadstyle } from "../../load";
import * as path from 'path';
import { dom, quickDom, emptyDom } from '../../dom/dom'
import { component } from '../component'
export class activity {
public name: string;
public label: dom;
public isActive: boolean;
public workbench: dom;
constructor(name: string) {
this.name = name;
this.isActive = false;
}
}
console.log(path.join(__dirname, './media/activitybar.css'));
loadstyle(path.join(__dirname, './media/activitybar.css'));
export class activitybar extends component {
private activityList: dom;
private activities: activity[];
constructor(
parent: dom
) {
super();
if (!parent) {
throw new Error('Invalid call to activitybar');
}
this.container = emptyDom().element('div', 'activitybar');
this.container.apendTo(parent);
this.activities = new Array();
// Add containers for activity
this.createActivity();
}
createActivity() |
updateStyle() {
// style update for statusbar
super.updateStyle();
}
addActivity(name: string, workbench: dom | undefined, context: any, fn: (act: activity, context: any) => void) {
let act = new activity(name);
var item = emptyDom().element('li', 'activity-item');
item.apendTo(this.activityList);
act.label = emptyDom().element('a', 'activity-label');
act.label.apendTo(item);
act.label.addClass(act.name);
act.label.title(act.name);
if (typeof workbench !== 'undefined') {
act.workbench = workbench;
act.workbench.addClass('hide');
}
item.on('mouseover', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseout', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('mousedown', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseup', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('click', (e: Event) => {
e.preventDefault();
for (let i = 0; i < this.activities.length; i++) {
this.activities[i].label.getHTMLElement().classList.remove('active');
if (typeof this.activities[i].workbench !== 'undefined') {
this.activities[i].workbench.addClass('hide');
}
this.activities[i].isActive = false;
}
act.label.addClass('active');
act.isActive = true;
if (typeof act.workbench !== 'undefined') {
act.workbench.removeClass('hide');
}
console.log('Invoking the registered workbench');
console.log(act);
//after this call the registered callback
fn(act, context);
})
this.activities.push(act);
}
} | {
let activityparent = emptyDom().element('div', 'activity-container');
activityparent.apendTo(this.container);
this.activityList = emptyDom().element('ul', 'activity-list');
this.activityList.apendTo(activityparent);
} | identifier_body |
activitybar.ts | import { loadstyle } from "../../load";
import * as path from 'path';
import { dom, quickDom, emptyDom } from '../../dom/dom'
import { component } from '../component'
export class activity {
public name: string;
public label: dom;
public isActive: boolean;
public workbench: dom;
constructor(name: string) {
this.name = name;
this.isActive = false;
}
} |
console.log(path.join(__dirname, './media/activitybar.css'));
loadstyle(path.join(__dirname, './media/activitybar.css'));
export class activitybar extends component {
private activityList: dom;
private activities: activity[];
constructor(
parent: dom
) {
super();
if (!parent) {
throw new Error('Invalid call to activitybar');
}
this.container = emptyDom().element('div', 'activitybar');
this.container.apendTo(parent);
this.activities = new Array();
// Add containers for activity
this.createActivity();
}
createActivity() {
let activityparent = emptyDom().element('div', 'activity-container');
activityparent.apendTo(this.container);
this.activityList = emptyDom().element('ul', 'activity-list');
this.activityList.apendTo(activityparent);
}
updateStyle() {
// style update for statusbar
super.updateStyle();
}
addActivity(name: string, workbench: dom | undefined, context: any, fn: (act: activity, context: any) => void) {
let act = new activity(name);
var item = emptyDom().element('li', 'activity-item');
item.apendTo(this.activityList);
act.label = emptyDom().element('a', 'activity-label');
act.label.apendTo(item);
act.label.addClass(act.name);
act.label.title(act.name);
if (typeof workbench !== 'undefined') {
act.workbench = workbench;
act.workbench.addClass('hide');
}
item.on('mouseover', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseout', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('mousedown', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseup', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('click', (e: Event) => {
e.preventDefault();
for (let i = 0; i < this.activities.length; i++) {
this.activities[i].label.getHTMLElement().classList.remove('active');
if (typeof this.activities[i].workbench !== 'undefined') {
this.activities[i].workbench.addClass('hide');
}
this.activities[i].isActive = false;
}
act.label.addClass('active');
act.isActive = true;
if (typeof act.workbench !== 'undefined') {
act.workbench.removeClass('hide');
}
console.log('Invoking the registered workbench');
console.log(act);
//after this call the registered callback
fn(act, context);
})
this.activities.push(act);
}
} | random_line_split | |
activitybar.ts | import { loadstyle } from "../../load";
import * as path from 'path';
import { dom, quickDom, emptyDom } from '../../dom/dom'
import { component } from '../component'
export class | {
public name: string;
public label: dom;
public isActive: boolean;
public workbench: dom;
constructor(name: string) {
this.name = name;
this.isActive = false;
}
}
console.log(path.join(__dirname, './media/activitybar.css'));
loadstyle(path.join(__dirname, './media/activitybar.css'));
export class activitybar extends component {
private activityList: dom;
private activities: activity[];
constructor(
parent: dom
) {
super();
if (!parent) {
throw new Error('Invalid call to activitybar');
}
this.container = emptyDom().element('div', 'activitybar');
this.container.apendTo(parent);
this.activities = new Array();
// Add containers for activity
this.createActivity();
}
createActivity() {
let activityparent = emptyDom().element('div', 'activity-container');
activityparent.apendTo(this.container);
this.activityList = emptyDom().element('ul', 'activity-list');
this.activityList.apendTo(activityparent);
}
updateStyle() {
// style update for statusbar
super.updateStyle();
}
addActivity(name: string, workbench: dom | undefined, context: any, fn: (act: activity, context: any) => void) {
let act = new activity(name);
var item = emptyDom().element('li', 'activity-item');
item.apendTo(this.activityList);
act.label = emptyDom().element('a', 'activity-label');
act.label.apendTo(item);
act.label.addClass(act.name);
act.label.title(act.name);
if (typeof workbench !== 'undefined') {
act.workbench = workbench;
act.workbench.addClass('hide');
}
item.on('mouseover', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseout', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('mousedown', (e: Event) => {
act.label.addClass('active');
})
item.on('mouseup', (e: Event) => {
if (act.isActive !== true) {
act.label.removeClass('active');
}
})
item.on('click', (e: Event) => {
e.preventDefault();
for (let i = 0; i < this.activities.length; i++) {
this.activities[i].label.getHTMLElement().classList.remove('active');
if (typeof this.activities[i].workbench !== 'undefined') {
this.activities[i].workbench.addClass('hide');
}
this.activities[i].isActive = false;
}
act.label.addClass('active');
act.isActive = true;
if (typeof act.workbench !== 'undefined') {
act.workbench.removeClass('hide');
}
console.log('Invoking the registered workbench');
console.log(act);
//after this call the registered callback
fn(act, context);
})
this.activities.push(act);
}
} | activity | identifier_name |
wijmo.angular2.grid.filter.min.js | /*
*
* Wijmo Library 5.20162.188
* http://wijmo.com/
*
* Copyright(c) GrapeCity, Inc. All rights reserved.
*
* Licensed under the Wijmo Commercial License.
* sales@wijmo.com
* http://wijmo.com/products/wijmo-5/license/
*
*/
System.register("wijmo/wijmo.angular2.grid.filter", ['@angular/core', 'wijmo/wijmo.angular2.directiveBase', 'wijmo/wijmo.angular2.grid'], function(exports_1, context_1)
{
"use strict";
var __moduleName=context_1 && context_1.id,
__extends=this && this.__extends || function(d, b)
{
function __()
{
this.constructor = d
}
for (var p in b)
b.hasOwnProperty(p) && (d[p] = b[p]);
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __)
},
__decorate=this && this.__decorate || function(decorators, target, key, desc)
{
var c=arguments.length,
r=c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc,
d,
i;
if (typeof Reflect == "object" && typeof Reflect.decorate == "function")
r = Reflect.decorate(decorators, target, key, desc);
else
for (i = decorators.length - 1; i >= 0; i--)
(d = decorators[i]) && (r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r);
return c > 3 && r && Object.defineProperty(target, key, r), r
},
__param=this && this.__param || function(paramIndex, decorator)
{
return function(target, key)
{
decorator(target, key, paramIndex)
}
},
core_1,
wijmo_angular2_directiveBase_1,
wijmo_angular2_grid_1,
WjFlexGridFilter;
return {
setters: [function(core_1_1)
{
core_1 = core_1_1
}, function(wijmo_angular2_directiveBase_1_1)
{
wijmo_angular2_directiveBase_1 = wijmo_angular2_directiveBase_1_1
}, function(wijmo_angular2_grid_1_1)
{
wijmo_angular2_grid_1 = wijmo_angular2_grid_1_1
}], execute: function()
{
WjFlexGridFilter = function(_super)
{
function WjFlexGridFilter(elRef, injector)
{
| return __extends(WjFlexGridFilter, _super), WjFlexGridFilter = __decorate([wijmo_angular2_directiveBase_1.WjComponent({
selector: 'wj-flex-grid-filter', template: "", wjParentDirectives: [wijmo_angular2_grid_1.WjFlexGrid]
}), __param(0, core_1.Inject(core_1.ElementRef)), __param(1, core_1.Inject(core_1.Injector))], WjFlexGridFilter)
}(wijmo.grid.filter.FlexGridFilter);
exports_1("WjFlexGridFilter", WjFlexGridFilter)
}
}
}) |
var parentCmp=wijmo_angular2_directiveBase_1.WjDirectiveBehavior.findTypeParentBehavior(injector, WjFlexGridFilter).directive;
_super.call(this, parentCmp);
wijmo_angular2_directiveBase_1.WjDirectiveBehavior.attach(this, elRef, injector)
}
| identifier_body |
wijmo.angular2.grid.filter.min.js | /*
*
* Wijmo Library 5.20162.188
* http://wijmo.com/
*
* Copyright(c) GrapeCity, Inc. All rights reserved.
*
* Licensed under the Wijmo Commercial License.
* sales@wijmo.com
* http://wijmo.com/products/wijmo-5/license/
*
*/
System.register("wijmo/wijmo.angular2.grid.filter", ['@angular/core', 'wijmo/wijmo.angular2.directiveBase', 'wijmo/wijmo.angular2.grid'], function(exports_1, context_1)
{
"use strict";
var __moduleName=context_1 && context_1.id,
__extends=this && this.__extends || function(d, b)
{
function __()
{
this.constructor = d
}
for (var p in b)
b.hasOwnProperty(p) && (d[p] = b[p]);
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __)
},
__decorate=this && this.__decorate || function(decorators, target, key, desc)
{
var c=arguments.length,
r=c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc,
d,
i;
if (typeof Reflect == "object" && typeof Reflect.decorate == "function")
r = Reflect.decorate(decorators, target, key, desc);
else
for (i = decorators.length - 1; i >= 0; i--)
(d = decorators[i]) && (r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r);
return c > 3 && r && Object.defineProperty(target, key, r), r
},
__param=this && this.__param || function(paramIndex, decorator)
{
return function(target, key)
{
decorator(target, key, paramIndex)
}
},
core_1,
wijmo_angular2_directiveBase_1,
wijmo_angular2_grid_1,
| }, function(wijmo_angular2_directiveBase_1_1)
{
wijmo_angular2_directiveBase_1 = wijmo_angular2_directiveBase_1_1
}, function(wijmo_angular2_grid_1_1)
{
wijmo_angular2_grid_1 = wijmo_angular2_grid_1_1
}], execute: function()
{
WjFlexGridFilter = function(_super)
{
function WjFlexGridFilter(elRef, injector)
{
var parentCmp=wijmo_angular2_directiveBase_1.WjDirectiveBehavior.findTypeParentBehavior(injector, WjFlexGridFilter).directive;
_super.call(this, parentCmp);
wijmo_angular2_directiveBase_1.WjDirectiveBehavior.attach(this, elRef, injector)
}
return __extends(WjFlexGridFilter, _super), WjFlexGridFilter = __decorate([wijmo_angular2_directiveBase_1.WjComponent({
selector: 'wj-flex-grid-filter', template: "", wjParentDirectives: [wijmo_angular2_grid_1.WjFlexGrid]
}), __param(0, core_1.Inject(core_1.ElementRef)), __param(1, core_1.Inject(core_1.Injector))], WjFlexGridFilter)
}(wijmo.grid.filter.FlexGridFilter);
exports_1("WjFlexGridFilter", WjFlexGridFilter)
}
}
}) | WjFlexGridFilter;
return {
setters: [function(core_1_1)
{
core_1 = core_1_1
| random_line_split |
wijmo.angular2.grid.filter.min.js | /*
*
* Wijmo Library 5.20162.188
* http://wijmo.com/
*
* Copyright(c) GrapeCity, Inc. All rights reserved.
*
* Licensed under the Wijmo Commercial License.
* sales@wijmo.com
* http://wijmo.com/products/wijmo-5/license/
*
*/
System.register("wijmo/wijmo.angular2.grid.filter", ['@angular/core', 'wijmo/wijmo.angular2.directiveBase', 'wijmo/wijmo.angular2.grid'], function(exports_1, context_1)
{
"use strict";
var __moduleName=context_1 && context_1.id,
__extends=this && this.__extends || function(d, b)
{
function __()
{
this.constructor = d
}
for (var p in b)
b.hasOwnProperty(p) && (d[p] = b[p]);
d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __)
},
__decorate=this && this.__decorate || function(decorators, target, key, desc)
{
var c=arguments.length,
r=c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc,
d,
i;
if (typeof Reflect == "object" && typeof Reflect.decorate == "function")
r = Reflect.decorate(decorators, target, key, desc);
else
for (i = decorators.length - 1; i >= 0; i--)
(d = decorators[i]) && (r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r);
return c > 3 && r && Object.defineProperty(target, key, r), r
},
__param=this && this.__param || function(paramIndex, decorator)
{
return function(target, key)
{
decorator(target, key, paramIndex)
}
},
core_1,
wijmo_angular2_directiveBase_1,
wijmo_angular2_grid_1,
WjFlexGridFilter;
return {
setters: [function(core_1_1)
{
core_1 = core_1_1
}, function(wijmo_angular2_directiveBase_1_1)
{
wijmo_angular2_directiveBase_1 = wijmo_angular2_directiveBase_1_1
}, function(wijmo_angular2_grid_1_1)
{
wijmo_angular2_grid_1 = wijmo_angular2_grid_1_1
}], execute: function()
{
WjFlexGridFilter = function(_super)
{
function Wj | lRef, injector)
{
var parentCmp=wijmo_angular2_directiveBase_1.WjDirectiveBehavior.findTypeParentBehavior(injector, WjFlexGridFilter).directive;
_super.call(this, parentCmp);
wijmo_angular2_directiveBase_1.WjDirectiveBehavior.attach(this, elRef, injector)
}
return __extends(WjFlexGridFilter, _super), WjFlexGridFilter = __decorate([wijmo_angular2_directiveBase_1.WjComponent({
selector: 'wj-flex-grid-filter', template: "", wjParentDirectives: [wijmo_angular2_grid_1.WjFlexGrid]
}), __param(0, core_1.Inject(core_1.ElementRef)), __param(1, core_1.Inject(core_1.Injector))], WjFlexGridFilter)
}(wijmo.grid.filter.FlexGridFilter);
exports_1("WjFlexGridFilter", WjFlexGridFilter)
}
}
}) | FlexGridFilter(e | identifier_name |
isyeventmonitor.py | import base64
import errno
import random
import ssl
import threading
import time
import copy
import websocket
import xmltodict
import config
import debug
from utils import exitutils
import hubs.isy.isycodes as isycodes
import logsupport
from controlevents import CEvent, PostEvent, ConsoleEvent, PostIfInterested
from hubs.isy.isycodes import EVENT_CTRL, formatwsitem
from logsupport import ConsoleWarning, ConsoleError, ConsoleDetail, ConsoleDetailHigh
from utils.threadmanager import ThreadStartException
from utils.utilfuncs import safeprint
class ISYEMInternalError(Exception):
pass
def BaseAddr(addr):
return None if addr is None else ' '.join(addr.split(' ')[0:-1])
class ISYEventMonitor(object):
def __init__(self, thisISY):
self.connectionmode = 'try994' # trypolisy: trying without subp, polisy: connection worked, try994: trying with subp 994worked.
self.isy = thisISY
self.hubname = thisISY.name
self.QHnum = 1
self.a = base64.b64encode((self.isy.user + ':' + self.isy.password).encode('utf-8'))
self.watchstarttime = time.time()
self.watchlist = []
self.streamid = "unset"
self.seq = 0
self.lastheartbeat = 0
self.hbcount = 0
self.AlertNodes = {}
self.delayedstart = 0
self.longdown = 0
self.WS = None
self.THstate = 'init'
self.querycnt = 0
self.queryqueued = {}
self.LastMsgErr = ('***', -99)
self.isy.Busy = 0
self.lasterror = 'Init'
debug.debugPrint('DaemonCtl', "Queue Handler ", self.QHnum, " started: ", self.watchstarttime)
self.reportablecodes = ["DON", "DFON", "DOF", "DFOF", "ST", "CLISP", "CLISPH", "CLISPC", "CLIFS",
"CLIMD", "CLIHUM", "CLIHCS", "BRT", "DIM"] # "RR", "OL",
def EndWSServer(self):
self.lasterror = "DirectCommError"
self.WS.close()
def RealQuery(self, enode, seq, ndnm):
logsupport.Logs.Log("Queued query attempt (" + str(seq) + ") for: " + ndnm)
time.sleep(105 + random.randint(0, 30)) # allow any in progress query at ISY a chance to clear
if enode not in self.isy.ErrNodes:
logsupport.Logs.Log("Node error cleared without need of query (" + str(seq) + ") for: " + ndnm)
return
logsupport.Logs.Log(self.hubname + ": Attempt query (" + str(seq) + ") for errored node: " + ndnm,
severity=ConsoleWarning)
r = self.isy.try_ISY_comm('query/' + enode, timeout=60, closeonfail=False)
if r == '':
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt failed for node: " + ndnm,
severity=ConsoleWarning)
else:
time.sleep(2)
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt succeeded for node: " + ndnm)
if enode in self.isy.ErrNodes: del self.isy.ErrNodes[enode]
if enode in self.queryqueued: del self.queryqueued[enode]
def DoNodeQuery(self, enode, ndnm):
if enode not in self.queryqueued:
self.querycnt += 1
self.queryqueued[enode] = self.querycnt
t = threading.Thread(name='Query-' + str(self.querycnt) + '-' + enode, target=self.RealQuery, daemon=True,
args=(enode, self.querycnt, ndnm))
t.start()
else:
logsupport.Logs.Log(
self.hubname + ": Query " + str(self.queryqueued[enode]) + " already queued for node: " + ndnm)
def FakeNodeChange(self):
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.HubNodeChange, hub=self.isy.name, node=None, value=-1))
def reinit(self):
self.watchstarttime = time.time()
self.watchlist = []
self.seq = 0
self.hbcount = 0
self.QHnum += 1
def PostStartQHThread(self):
if self.isy.version == -1:
# test mode
return
hungcount = 40
while self.THstate == 'restarting':
logsupport.Logs.Log(self.hubname + " Waiting thread start")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
while self.THstate == 'delaying':
time.sleep(1)
hungcount = 60
while self.THstate == 'starting':
|
if self.THstate == 'running':
self.isy._HubOnline = True
self.isy.Vars.CheckValsUpToDate(reload=True)
logsupport.Logs.Log(self.hubname + ": Initial status streamed ", self.seq, " items and vars updated")
elif self.THstate == 'failed':
logsupport.Logs.Log(self.hubname + " Failed Thread Restart", severity=ConsoleWarning)
else:
logsupport.Logs.Log(self.hubname + " Unknown ISY QH Thread state")
def PreRestartQHThread(self):
self.isy._HubOnline = False
self.THstate = 'restarting'
try:
if self.lasterror == 'ISYSocketTimeOut':
logsupport.Logs.Log(self.hubname + '(TimeoutError) Wait for likely router reboot or down',
severity=ConsoleWarning, tb=False)
self.delayedstart = 150
self.reinit()
return
if self.lasterror == 'ISYWSTimeOut':
logsupport.Logs.Log(self.hubname + ' WS restart after surprise close - short delay (15)',
severity=ConsoleWarning)
self.delayedstart = 15
elif self.lasterror == 'ISYNetDown':
# likely home network down so wait a bit
logsupport.Logs.Log(self.hubname + ' WS restart for NETUNREACH - delay likely router reboot or down',
severity=ConsoleWarning)
self.delayedstart = 121
elif self.lasterror == 'ISYClose':
logsupport.Logs.Log(self.hubname + ' Recovering closed WS stream')
self.delayedstart = 2
elif self.lasterror == 'DirectCommError':
logsupport.Logs.Log(self.hubname + ' WS restart because of failed direct communication failure')
self.delayedstart = 90 # probably ISY doing query
elif self.lasterror == 'ISYNoRoute':
logsupport.Logs.Log("{}: Hub probably down (semi) permanently ({})".self.name, self.longdown)
self.delayedstart = 3600 + self.longdown * 1800 # spread checks way out
self.isy._HubOnline = False
self.longdown += 1
else:
logsupport.Logs.Log(self.hubname + ' Unexpected error on WS stream: ', self.lasterror,
severity=ConsoleError, tb=False)
self.delayedstart = 90
except Exception as e:
logsupport.Logs.Log(self.hubname + ' PreRestartQH internal error ', e)
self.reinit()
def QHandler(self):
def on_error(qws, error):
self.isy.HBWS.Entry(repr(error))
self.lasterror = "ISYUnknown"
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
if isinstance(error, websocket.WebSocketConnectionClosedException):
logsupport.Logs.Log(self.hubname + " WS connection closed - attempt to recontact ISY",
severity=reconsev)
self.lasterror = 'ISYClose'
elif isinstance(error, websocket.WebSocketTimeoutException):
logsupport.Logs.Log(self.hubname + " WS connection timed out", severity=ConsoleWarning)
self.lasterror = 'ISYWSTimeOut'
elif isinstance(error, TimeoutError):
logsupport.Logs.Log(self.hubname + " WS socket timed out", severity=ConsoleWarning)
self.lasterror = 'ISYSocketTimeOut'
elif isinstance(error, AttributeError):
logsupport.Logs.Log(self.hubname + " WS library bug", severity=ConsoleWarning)
self.lasterror = 'ISYClose'
elif isinstance(error, OSError):
if error.errno == errno.ENETUNREACH:
logsupport.Logs.Log(self.hubname + " WS network down", severity=ConsoleWarning)
self.lasterror = 'ISYNetDown'
else:
logsupport.Logs.Log(self.hubname + ' WS OS error', repr(error), severity=ConsoleError, tb=False)
self.lasterror = 'ISYNoRoute' # probably semi permanent failure
else:
if self.connectionmode == 'try994':
logsupport.Logs.Log("{}: Connection failed using 994 convention".format(self.hubname))
self.connectionmode = 'trypolisy'
elif self.connectionmode == 'trypolisy':
logsupport.Logs.Log("{}: Connection failed using Polisy convention".format(self.hubname))
self.connectionmode = 'try994'
else:
logsupport.Logs.Log(self.hubname + " Error in WS stream " + str(self.QHnum) + ': ' + repr(error),
severity=ConsoleError,
tb=True)
logsupport.Logs.Log(repr(websocket.WebSocketConnectionClosedException))
self.THstate = 'failed'
debug.debugPrint('DaemonCtl', "Websocket stream error", self.QHnum, repr(error))
qws.close()
# noinspection PyUnusedLocal
def on_close(qws, code, reason):
self.isy.HBWS.Entry("Close")
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log("{} WS stream {} closed: {}:{}".format(self.hubname, self.QHnum, code, reason),
severity=reconsev, hb=True)
debug.debugPrint('DaemonCtl', "ISY Websocket stream closed", str(code), str(reason))
def on_open(qws):
self.isy.HBWS.Entry("Open")
self.THstate = 'starting'
if self.connectionmode == 'try994':
self.connectionmode = '994worked'
logsupport.Logs.Log('{} connection worked using 994 convention'.format(self.isy.name))
elif self.connectionmode == 'trypolisy':
self.connectionmode = 'polisyworked'
logsupport.Logs.Log('{} connection worked using Polisy convention'.format(self.isy.name))
mess = '994' if self.connectionmode == '994worked' else 'Polisy' if self.connectionmode == 'polisyworked' else self.connectionmode
logsupport.Logs.Log("{}: WS stream {} opened ({})".format(self.hubname, self.QHnum, mess))
debug.debugPrint('DaemonCtl', "Websocket stream opened: ", self.QHnum, self.streamid)
self.WS = qws
# noinspection PyUnusedLocal,PyUnboundLocalVariable
def on_message(qws, message):
loopstart = time.time()
self.isy.HBWS.Entry('Message: {}'.format(repr(message)))
# print('Message: {}'.format(message))
try:
m = 'parse error'
m = xmltodict.parse(message)
msav = copy.deepcopy(m)
if debug.dbgStore.GetVal('ISYDump'):
debug.ISYDump("isystream.dmp", message, pretty=False)
# print(m)
if 'SubscriptionResponse' in m:
sr = m['SubscriptionResponse']
if self.streamid != sr['SID']:
self.streamid = sr['SID']
logsupport.Logs.Log("{}: Stream id: {}".format(self.hubname, self.streamid))
elif 'Event' in m:
E = m['Event']
esid = E.pop('@sid', 'No sid')
if self.streamid != esid:
logsupport.Logs.Log(
self.hubname + " Unexpected event stream change: " + self.streamid + "/" + str(esid),
severity=ConsoleError, tb=False)
exitutils.FatalError("WS Stream ID Changed")
eseq = int(E.pop('@seqnum', -99))
if self.seq != eseq:
logsupport.Logs.Log(
self.hubname + " Event mismatch - Expected: " + str(self.seq) + " Got: " + str(eseq),
severity=ConsoleWarning)
raise ISYEMInternalError
else:
self.seq += 1
ecode = E.pop('control', 'Missing control')
if ecode in EVENT_CTRL:
prcode = EVENT_CTRL[ecode]
else:
prcode = "**" + ecode + "**"
eaction = E.pop('action', 'No action')
enode = E.pop('node', 'No node')
eInfo = E.pop('eventInfo', 'No EventInfo')
if isinstance(eaction, dict):
debug.debugPrint('DaemonStream', "V5 stream - pull up action value: ", eaction)
eaction = eaction["#text"] # the new xmltodict will return as data['action']['#text']
if enode in self.isy.NodesByAddr: # get the node to set if any
N = self.isy.NodesByAddr[enode]
else:
N = None
if ecode == 'ST': # update cached state first before posting alerts or race
if isinstance(N, isycodes.ThermType):
N.cur = isycodes.NormalizeState(eaction)
elif N is not None:
oldstate = N.devState
N.devState = isycodes.NormalizeState(eaction)
logsupport.Logs.Log('ISYchg', 'ISY Node: ', N.name, ' state change from: ', oldstate,
' to: ', N.devState, severity=ConsoleDetailHigh)
if (oldstate == N.devState) and self.THstate == 'running':
logsupport.Logs.Log(self.hubname +
" State report with no change: " + N.name + ' state: ' + str(
oldstate))
else:
logsupport.Logs.Log(self.hubname +
" Status change for " + N.name + '(' + str(enode) + ') to ' + str(
N.devState), severity=ConsoleDetailHigh)
# status changed to post to any alerts that want it
# since alerts can only react to the state of a node we check only on an ST message
# screens on the other hand may need to know about other actions (thermostat e.g.)
# so they get checked below under reportablecodes
# if I check alerts there I get extra invocations for the DON and DOF e.g. which while not
# harmful are anomolous
if enode in self.AlertNodes:
# alert node changed
debug.debugPrint('DaemonCtl', 'ISY reports change(alert):',
self.isy.NodesByAddr[enode].name)
for a in self.AlertNodes[enode]:
if self.THstate != 'running':
# this is a restart or initial dump so indicate upwards to avoid misleading log entry
if a.state == 'Armed':
a.state = 'Init'
logsupport.Logs.Log(self.hubname + " Node alert fired: " + str(a),
severity=ConsoleDetail)
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.ISYAlert, hub=self.isy.name, node=enode,
value=isycodes.NormalizeState(eaction), alert=a))
elif ecode == 'CLIHCS' and isinstance(N, isycodes.ThermType):
N.statecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIFS' and isinstance(N, isycodes.ThermType):
N.fancode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIMD' and isinstance(N, isycodes.ThermType):
N.modecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIHUM' and isinstance(N, isycodes.ThermType):
N.hum = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPH' and isinstance(N, isycodes.ThermType):
N.setlow = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPC' and isinstance(N, isycodes.ThermType):
N.sethigh = isycodes.NormalizeState(eaction)
if ecode in self.reportablecodes:
# Node change report
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
"Status update in stream: ",
eseq, ":",
prcode, " : ", enode, " : ", eInfo, " : ", eaction)
# logsupport.Logs.Log('reportable event '+str(ecode)+' for '+str(enode)+' action '+str(eaction))
PostIfInterested(self.isy, enode, isycodes.NormalizeState(eaction))
elif (prcode == 'Trigger') and (eaction == '6'):
vinfo = eInfo['var']
vartype = int(vinfo['@type'])
varid = int(vinfo['@id'])
varval = int(vinfo['val'])
debug.debugPrint('DaemonCtl', 'Var change: ', self.isy.Vars.GetNameFromAttr((vartype, varid)),
' set to ', varval)
debug.debugPrint('DaemonCtl', 'Var change:', ('Unkn', 'Integer', 'State')[vartype],
' variable ', varid,
' set to ', varval)
try:
self.isy.Vars.SetValByAttr((vartype, varid), varval, modifier=True)
except KeyError:
logsupport.Logs.Log(
"Unknown variable from " + self.hubname + " - probably added since startup",
severity=ConsoleWarning)
elif prcode == 'Heartbeat':
if self.hbcount > 0:
# wait 2 heartbeats
self.THstate = 'running'
self.lastheartbeat = time.time()
self.hbcount += 1
elif prcode == 'Billing':
self.THstate = 'running'
else:
pass # handle any other?
efmtact = E.pop('fmtAct', 'v4stream')
efmtnm = E.pop('fmtName', 'noName')
if E:
lev = ConsoleDetailHigh if str(
enode) in self.isy.V3Nodes else ConsoleWarning # supress to detail if it is a V3 node
logsupport.Logs.Log(
self.hubname + " Extra info in event: " + str(ecode) + '/' + str(prcode) + '/' + str(
eaction) + '/' + str(enode) + '/' + str(eInfo) + ' ' + str(E), severity=lev)
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
formatwsitem(esid, eseq, ecode, eaction, enode, eInfo, E, self.isy))
try:
isynd = self.isy.NodesByAddr[enode].name
except (KeyError, AttributeError):
isynd = enode
if ecode == '_5':
now = time.time()
if str(eaction) == '1':
# logsupport.Logs.Log(self.hubname, ' went busy')
self.isy.Busy = now
elif str(eaction) == '0':
if self.isy.Busy != 0:
# logsupport.Logs.Log(self.hubname, " cleared busy")
if now - self.isy.Busy > 10:
logsupport.Logs.Log(
"{}: busy for {:.4f} seconds".format(self.hubname, now - self.isy.Busy))
self.isy.Busy = 0
else:
logsupport.Logs.Log(self.hubname, " reported stand-alone not busy")
else:
logsupport.Logs.Log(self.hubname, " reported System Status: ", str(eaction))
if ecode == "ST" or (ecode == "_3" and eaction == "CE"):
if self.LastMsgErr[0] != '***' and (
BaseAddr(self.LastMsgErr[0]) == BaseAddr(enode)):
# ERR msg followed by clearing - ISY weirdness?
logsupport.Logs.Log(
"{} reported and immediately cleared error for node: {} ({}) (seq:{}/{})".format(
self.hubname,
isynd, BaseAddr(self.LastMsgErr[0]), self.LastMsgErr[1], eseq),
severity=ConsoleWarning, hb=True)
self.LastMsgErr = ('***', -99)
elif enode in self.isy.ErrNodes:
logsupport.Logs.Log("{} cleared comm error for node: {}".format(self.hubname, isynd))
if enode in self.isy.ErrNodes:
# logsupport.Logs.Log("Query thread still running")
del self.isy.ErrNodes[enode]
if self.LastMsgErr != ('***', -99):
# previous message was ERR and wasn't immediately cleared
try:
isyerrnd = self.isy.NodesByAddr[self.LastMsgErr[0]].name
except (KeyError, AttributeError):
isyerrnd = self.LastMsgErr[0]
logsupport.Logs.Log(
"{} WS stream shows comm error for node: {}(Seq:{})".format(self.hubname, isyerrnd,
self.LastMsgErr[1]),
severity=ConsoleWarning, hb=True)
if self.LastMsgErr[0] not in self.isy.ErrNodes:
self.isy.ErrNodes[self.LastMsgErr[0]] = eseq
self.DoNodeQuery(self.LastMsgErr[0], isyerrnd)
self.LastMsgErr = ('***', -99)
if ecode == "ERR":
if str(eaction) == "0":
pass
# logsupport.Logs.Log("ERR(0) seen: {}".format(repr(m)))
else:
# Note the error and wait one message to see if it immediately clears
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log("ERR(1) seen: {}".format(repr(xmltodict.parse(message))),
severity=ConsoleWarning)
if ecode == "_3" and eaction == "NE":
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log(
"{} WS stream reported NE error code on WS stream for node{}(Seq:{})".format(self.hubname,
isynd, eseq),
hb=True)
else:
logsupport.Logs.Log(self.hubname + " Strange item in event stream: " + str(m),
severity=ConsoleWarning)
safeprint(message)
except Exception as E:
logsupport.Logs.Log(self.hubname + " Exception in QH on message: ", repr(msav), ' Excp: ', repr(E),
severity=ConsoleWarning)
loopend = time.time()
self.isy.HBWS.Entry('Processing time: {} Done: {}'.format(loopend - loopstart, repr(
message)))
time.sleep(.001) # force thread to give up processor to allow response to time events
if self.isy.version == -1:
self.isy._HubOnline = True
time.sleep(7)
with open('/home/pi/Console/isystream.dmp', 'r') as f:
mes = f.readline() # absorb first
# safeprint("Message1: {}".format(mes))
while True:
mes = f.readline().rstrip('\n')
if mes == '':
# safeprint('Done')
break
# safeprint("Message: {}".format(mes))
on_message(None, mes)
time.sleep(.4)
while True:
time.sleep(500)
return
self.THstate = 'delaying'
logsupport.Logs.Log("{}: WS stream thread {} setup".format(self.hubname, self.QHnum), severity=ConsoleDetail)
if self.delayedstart != 0:
logsupport.Logs.Log(self.hubname + " Delaying Hub restart for probable network reset: ",
str(self.delayedstart), ' seconds')
time.sleep(self.delayedstart)
# websocket.enableTrace(True)
websocket.setdefaulttimeout(30)
if self.isy.addr.startswith('http://'):
wsurl = 'ws://' + self.isy.addr[7:] + '/rest/subscribe'
elif self.isy.addr.startswith('https://'):
wsurl = 'wss://' + self.isy.addr[8:] + '/rest/subscribe'
else:
wsurl = 'ws://' + self.isy.addr + '/rest/subscribe'
import logging
WStrace = open('/home/pi/WStrace', 'w')
print('Open {}'.format(wsurl), file=WStrace)
websocket.enableTrace(True, handler=logging.StreamHandler(stream=WStrace))
while True:
try:
# noinspection PyArgumentList
if self.connectionmode in ('trypolisy', 'polisyworked'):
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
else:
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
subprotocols=['ISYSUB'],
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
break
except AttributeError as e:
logsupport.Logs.Log(self.hubname + " Problem starting WS handler - retrying: ", repr(e))
self.lastheartbeat = time.time()
ws.run_forever(ping_timeout=999, sslopt={"cert_reqs": ssl.CERT_NONE})
self.THstate = 'failed'
self.isy._HubOnline = False
sev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log(self.hubname + " QH Thread " + str(self.QHnum) + " exiting", severity=sev,
tb=False)
| logsupport.Logs.Log(self.hubname + ": Waiting initial status dump")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException | conditional_block |
isyeventmonitor.py | import base64
import errno
import random
import ssl
import threading
import time
import copy
import websocket
import xmltodict
import config
import debug
from utils import exitutils
import hubs.isy.isycodes as isycodes
import logsupport
from controlevents import CEvent, PostEvent, ConsoleEvent, PostIfInterested
from hubs.isy.isycodes import EVENT_CTRL, formatwsitem
from logsupport import ConsoleWarning, ConsoleError, ConsoleDetail, ConsoleDetailHigh
from utils.threadmanager import ThreadStartException
from utils.utilfuncs import safeprint
class ISYEMInternalError(Exception):
pass
def BaseAddr(addr):
return None if addr is None else ' '.join(addr.split(' ')[0:-1])
class ISYEventMonitor(object):
def __init__(self, thisISY):
self.connectionmode = 'try994' # trypolisy: trying without subp, polisy: connection worked, try994: trying with subp 994worked.
self.isy = thisISY
self.hubname = thisISY.name
self.QHnum = 1
self.a = base64.b64encode((self.isy.user + ':' + self.isy.password).encode('utf-8'))
self.watchstarttime = time.time()
self.watchlist = []
self.streamid = "unset"
self.seq = 0
self.lastheartbeat = 0
self.hbcount = 0
self.AlertNodes = {}
self.delayedstart = 0
self.longdown = 0
self.WS = None
self.THstate = 'init'
self.querycnt = 0
self.queryqueued = {}
self.LastMsgErr = ('***', -99)
self.isy.Busy = 0
self.lasterror = 'Init'
debug.debugPrint('DaemonCtl', "Queue Handler ", self.QHnum, " started: ", self.watchstarttime)
self.reportablecodes = ["DON", "DFON", "DOF", "DFOF", "ST", "CLISP", "CLISPH", "CLISPC", "CLIFS",
"CLIMD", "CLIHUM", "CLIHCS", "BRT", "DIM"] # "RR", "OL",
def EndWSServer(self):
self.lasterror = "DirectCommError"
self.WS.close()
def RealQuery(self, enode, seq, ndnm):
logsupport.Logs.Log("Queued query attempt (" + str(seq) + ") for: " + ndnm)
time.sleep(105 + random.randint(0, 30)) # allow any in progress query at ISY a chance to clear
if enode not in self.isy.ErrNodes:
logsupport.Logs.Log("Node error cleared without need of query (" + str(seq) + ") for: " + ndnm)
return
logsupport.Logs.Log(self.hubname + ": Attempt query (" + str(seq) + ") for errored node: " + ndnm,
severity=ConsoleWarning)
r = self.isy.try_ISY_comm('query/' + enode, timeout=60, closeonfail=False)
if r == '':
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt failed for node: " + ndnm,
severity=ConsoleWarning)
else:
time.sleep(2)
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt succeeded for node: " + ndnm)
if enode in self.isy.ErrNodes: del self.isy.ErrNodes[enode]
if enode in self.queryqueued: del self.queryqueued[enode]
def DoNodeQuery(self, enode, ndnm):
if enode not in self.queryqueued:
self.querycnt += 1
self.queryqueued[enode] = self.querycnt
t = threading.Thread(name='Query-' + str(self.querycnt) + '-' + enode, target=self.RealQuery, daemon=True,
args=(enode, self.querycnt, ndnm))
t.start()
else:
logsupport.Logs.Log(
self.hubname + ": Query " + str(self.queryqueued[enode]) + " already queued for node: " + ndnm)
def FakeNodeChange(self):
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.HubNodeChange, hub=self.isy.name, node=None, value=-1))
def reinit(self):
self.watchstarttime = time.time()
self.watchlist = []
self.seq = 0
self.hbcount = 0
self.QHnum += 1
def PostStartQHThread(self):
if self.isy.version == -1:
# test mode
return
hungcount = 40
while self.THstate == 'restarting':
logsupport.Logs.Log(self.hubname + " Waiting thread start")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
while self.THstate == 'delaying':
time.sleep(1)
hungcount = 60
while self.THstate == 'starting':
logsupport.Logs.Log(self.hubname + ": Waiting initial status dump")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
if self.THstate == 'running':
self.isy._HubOnline = True
self.isy.Vars.CheckValsUpToDate(reload=True)
logsupport.Logs.Log(self.hubname + ": Initial status streamed ", self.seq, " items and vars updated")
elif self.THstate == 'failed':
logsupport.Logs.Log(self.hubname + " Failed Thread Restart", severity=ConsoleWarning)
else:
logsupport.Logs.Log(self.hubname + " Unknown ISY QH Thread state")
def PreRestartQHThread(self):
self.isy._HubOnline = False
self.THstate = 'restarting'
try:
if self.lasterror == 'ISYSocketTimeOut':
logsupport.Logs.Log(self.hubname + '(TimeoutError) Wait for likely router reboot or down',
severity=ConsoleWarning, tb=False)
self.delayedstart = 150
self.reinit()
return
if self.lasterror == 'ISYWSTimeOut':
logsupport.Logs.Log(self.hubname + ' WS restart after surprise close - short delay (15)',
severity=ConsoleWarning)
self.delayedstart = 15 | severity=ConsoleWarning)
self.delayedstart = 121
elif self.lasterror == 'ISYClose':
logsupport.Logs.Log(self.hubname + ' Recovering closed WS stream')
self.delayedstart = 2
elif self.lasterror == 'DirectCommError':
logsupport.Logs.Log(self.hubname + ' WS restart because of failed direct communication failure')
self.delayedstart = 90 # probably ISY doing query
elif self.lasterror == 'ISYNoRoute':
logsupport.Logs.Log("{}: Hub probably down (semi) permanently ({})".self.name, self.longdown)
self.delayedstart = 3600 + self.longdown * 1800 # spread checks way out
self.isy._HubOnline = False
self.longdown += 1
else:
logsupport.Logs.Log(self.hubname + ' Unexpected error on WS stream: ', self.lasterror,
severity=ConsoleError, tb=False)
self.delayedstart = 90
except Exception as e:
logsupport.Logs.Log(self.hubname + ' PreRestartQH internal error ', e)
self.reinit()
def QHandler(self):
def on_error(qws, error):
self.isy.HBWS.Entry(repr(error))
self.lasterror = "ISYUnknown"
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
if isinstance(error, websocket.WebSocketConnectionClosedException):
logsupport.Logs.Log(self.hubname + " WS connection closed - attempt to recontact ISY",
severity=reconsev)
self.lasterror = 'ISYClose'
elif isinstance(error, websocket.WebSocketTimeoutException):
logsupport.Logs.Log(self.hubname + " WS connection timed out", severity=ConsoleWarning)
self.lasterror = 'ISYWSTimeOut'
elif isinstance(error, TimeoutError):
logsupport.Logs.Log(self.hubname + " WS socket timed out", severity=ConsoleWarning)
self.lasterror = 'ISYSocketTimeOut'
elif isinstance(error, AttributeError):
logsupport.Logs.Log(self.hubname + " WS library bug", severity=ConsoleWarning)
self.lasterror = 'ISYClose'
elif isinstance(error, OSError):
if error.errno == errno.ENETUNREACH:
logsupport.Logs.Log(self.hubname + " WS network down", severity=ConsoleWarning)
self.lasterror = 'ISYNetDown'
else:
logsupport.Logs.Log(self.hubname + ' WS OS error', repr(error), severity=ConsoleError, tb=False)
self.lasterror = 'ISYNoRoute' # probably semi permanent failure
else:
if self.connectionmode == 'try994':
logsupport.Logs.Log("{}: Connection failed using 994 convention".format(self.hubname))
self.connectionmode = 'trypolisy'
elif self.connectionmode == 'trypolisy':
logsupport.Logs.Log("{}: Connection failed using Polisy convention".format(self.hubname))
self.connectionmode = 'try994'
else:
logsupport.Logs.Log(self.hubname + " Error in WS stream " + str(self.QHnum) + ': ' + repr(error),
severity=ConsoleError,
tb=True)
logsupport.Logs.Log(repr(websocket.WebSocketConnectionClosedException))
self.THstate = 'failed'
debug.debugPrint('DaemonCtl', "Websocket stream error", self.QHnum, repr(error))
qws.close()
# noinspection PyUnusedLocal
def on_close(qws, code, reason):
self.isy.HBWS.Entry("Close")
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log("{} WS stream {} closed: {}:{}".format(self.hubname, self.QHnum, code, reason),
severity=reconsev, hb=True)
debug.debugPrint('DaemonCtl', "ISY Websocket stream closed", str(code), str(reason))
def on_open(qws):
self.isy.HBWS.Entry("Open")
self.THstate = 'starting'
if self.connectionmode == 'try994':
self.connectionmode = '994worked'
logsupport.Logs.Log('{} connection worked using 994 convention'.format(self.isy.name))
elif self.connectionmode == 'trypolisy':
self.connectionmode = 'polisyworked'
logsupport.Logs.Log('{} connection worked using Polisy convention'.format(self.isy.name))
mess = '994' if self.connectionmode == '994worked' else 'Polisy' if self.connectionmode == 'polisyworked' else self.connectionmode
logsupport.Logs.Log("{}: WS stream {} opened ({})".format(self.hubname, self.QHnum, mess))
debug.debugPrint('DaemonCtl', "Websocket stream opened: ", self.QHnum, self.streamid)
self.WS = qws
# noinspection PyUnusedLocal,PyUnboundLocalVariable
def on_message(qws, message):
loopstart = time.time()
self.isy.HBWS.Entry('Message: {}'.format(repr(message)))
# print('Message: {}'.format(message))
try:
m = 'parse error'
m = xmltodict.parse(message)
msav = copy.deepcopy(m)
if debug.dbgStore.GetVal('ISYDump'):
debug.ISYDump("isystream.dmp", message, pretty=False)
# print(m)
if 'SubscriptionResponse' in m:
sr = m['SubscriptionResponse']
if self.streamid != sr['SID']:
self.streamid = sr['SID']
logsupport.Logs.Log("{}: Stream id: {}".format(self.hubname, self.streamid))
elif 'Event' in m:
E = m['Event']
esid = E.pop('@sid', 'No sid')
if self.streamid != esid:
logsupport.Logs.Log(
self.hubname + " Unexpected event stream change: " + self.streamid + "/" + str(esid),
severity=ConsoleError, tb=False)
exitutils.FatalError("WS Stream ID Changed")
eseq = int(E.pop('@seqnum', -99))
if self.seq != eseq:
logsupport.Logs.Log(
self.hubname + " Event mismatch - Expected: " + str(self.seq) + " Got: " + str(eseq),
severity=ConsoleWarning)
raise ISYEMInternalError
else:
self.seq += 1
ecode = E.pop('control', 'Missing control')
if ecode in EVENT_CTRL:
prcode = EVENT_CTRL[ecode]
else:
prcode = "**" + ecode + "**"
eaction = E.pop('action', 'No action')
enode = E.pop('node', 'No node')
eInfo = E.pop('eventInfo', 'No EventInfo')
if isinstance(eaction, dict):
debug.debugPrint('DaemonStream', "V5 stream - pull up action value: ", eaction)
eaction = eaction["#text"] # the new xmltodict will return as data['action']['#text']
if enode in self.isy.NodesByAddr: # get the node to set if any
N = self.isy.NodesByAddr[enode]
else:
N = None
if ecode == 'ST': # update cached state first before posting alerts or race
if isinstance(N, isycodes.ThermType):
N.cur = isycodes.NormalizeState(eaction)
elif N is not None:
oldstate = N.devState
N.devState = isycodes.NormalizeState(eaction)
logsupport.Logs.Log('ISYchg', 'ISY Node: ', N.name, ' state change from: ', oldstate,
' to: ', N.devState, severity=ConsoleDetailHigh)
if (oldstate == N.devState) and self.THstate == 'running':
logsupport.Logs.Log(self.hubname +
" State report with no change: " + N.name + ' state: ' + str(
oldstate))
else:
logsupport.Logs.Log(self.hubname +
" Status change for " + N.name + '(' + str(enode) + ') to ' + str(
N.devState), severity=ConsoleDetailHigh)
# status changed to post to any alerts that want it
# since alerts can only react to the state of a node we check only on an ST message
# screens on the other hand may need to know about other actions (thermostat e.g.)
# so they get checked below under reportablecodes
# if I check alerts there I get extra invocations for the DON and DOF e.g. which while not
# harmful are anomolous
if enode in self.AlertNodes:
# alert node changed
debug.debugPrint('DaemonCtl', 'ISY reports change(alert):',
self.isy.NodesByAddr[enode].name)
for a in self.AlertNodes[enode]:
if self.THstate != 'running':
# this is a restart or initial dump so indicate upwards to avoid misleading log entry
if a.state == 'Armed':
a.state = 'Init'
logsupport.Logs.Log(self.hubname + " Node alert fired: " + str(a),
severity=ConsoleDetail)
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.ISYAlert, hub=self.isy.name, node=enode,
value=isycodes.NormalizeState(eaction), alert=a))
elif ecode == 'CLIHCS' and isinstance(N, isycodes.ThermType):
N.statecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIFS' and isinstance(N, isycodes.ThermType):
N.fancode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIMD' and isinstance(N, isycodes.ThermType):
N.modecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIHUM' and isinstance(N, isycodes.ThermType):
N.hum = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPH' and isinstance(N, isycodes.ThermType):
N.setlow = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPC' and isinstance(N, isycodes.ThermType):
N.sethigh = isycodes.NormalizeState(eaction)
if ecode in self.reportablecodes:
# Node change report
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
"Status update in stream: ",
eseq, ":",
prcode, " : ", enode, " : ", eInfo, " : ", eaction)
# logsupport.Logs.Log('reportable event '+str(ecode)+' for '+str(enode)+' action '+str(eaction))
PostIfInterested(self.isy, enode, isycodes.NormalizeState(eaction))
elif (prcode == 'Trigger') and (eaction == '6'):
vinfo = eInfo['var']
vartype = int(vinfo['@type'])
varid = int(vinfo['@id'])
varval = int(vinfo['val'])
debug.debugPrint('DaemonCtl', 'Var change: ', self.isy.Vars.GetNameFromAttr((vartype, varid)),
' set to ', varval)
debug.debugPrint('DaemonCtl', 'Var change:', ('Unkn', 'Integer', 'State')[vartype],
' variable ', varid,
' set to ', varval)
try:
self.isy.Vars.SetValByAttr((vartype, varid), varval, modifier=True)
except KeyError:
logsupport.Logs.Log(
"Unknown variable from " + self.hubname + " - probably added since startup",
severity=ConsoleWarning)
elif prcode == 'Heartbeat':
if self.hbcount > 0:
# wait 2 heartbeats
self.THstate = 'running'
self.lastheartbeat = time.time()
self.hbcount += 1
elif prcode == 'Billing':
self.THstate = 'running'
else:
pass # handle any other?
efmtact = E.pop('fmtAct', 'v4stream')
efmtnm = E.pop('fmtName', 'noName')
if E:
lev = ConsoleDetailHigh if str(
enode) in self.isy.V3Nodes else ConsoleWarning # supress to detail if it is a V3 node
logsupport.Logs.Log(
self.hubname + " Extra info in event: " + str(ecode) + '/' + str(prcode) + '/' + str(
eaction) + '/' + str(enode) + '/' + str(eInfo) + ' ' + str(E), severity=lev)
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
formatwsitem(esid, eseq, ecode, eaction, enode, eInfo, E, self.isy))
try:
isynd = self.isy.NodesByAddr[enode].name
except (KeyError, AttributeError):
isynd = enode
if ecode == '_5':
now = time.time()
if str(eaction) == '1':
# logsupport.Logs.Log(self.hubname, ' went busy')
self.isy.Busy = now
elif str(eaction) == '0':
if self.isy.Busy != 0:
# logsupport.Logs.Log(self.hubname, " cleared busy")
if now - self.isy.Busy > 10:
logsupport.Logs.Log(
"{}: busy for {:.4f} seconds".format(self.hubname, now - self.isy.Busy))
self.isy.Busy = 0
else:
logsupport.Logs.Log(self.hubname, " reported stand-alone not busy")
else:
logsupport.Logs.Log(self.hubname, " reported System Status: ", str(eaction))
if ecode == "ST" or (ecode == "_3" and eaction == "CE"):
if self.LastMsgErr[0] != '***' and (
BaseAddr(self.LastMsgErr[0]) == BaseAddr(enode)):
# ERR msg followed by clearing - ISY weirdness?
logsupport.Logs.Log(
"{} reported and immediately cleared error for node: {} ({}) (seq:{}/{})".format(
self.hubname,
isynd, BaseAddr(self.LastMsgErr[0]), self.LastMsgErr[1], eseq),
severity=ConsoleWarning, hb=True)
self.LastMsgErr = ('***', -99)
elif enode in self.isy.ErrNodes:
logsupport.Logs.Log("{} cleared comm error for node: {}".format(self.hubname, isynd))
if enode in self.isy.ErrNodes:
# logsupport.Logs.Log("Query thread still running")
del self.isy.ErrNodes[enode]
if self.LastMsgErr != ('***', -99):
# previous message was ERR and wasn't immediately cleared
try:
isyerrnd = self.isy.NodesByAddr[self.LastMsgErr[0]].name
except (KeyError, AttributeError):
isyerrnd = self.LastMsgErr[0]
logsupport.Logs.Log(
"{} WS stream shows comm error for node: {}(Seq:{})".format(self.hubname, isyerrnd,
self.LastMsgErr[1]),
severity=ConsoleWarning, hb=True)
if self.LastMsgErr[0] not in self.isy.ErrNodes:
self.isy.ErrNodes[self.LastMsgErr[0]] = eseq
self.DoNodeQuery(self.LastMsgErr[0], isyerrnd)
self.LastMsgErr = ('***', -99)
if ecode == "ERR":
if str(eaction) == "0":
pass
# logsupport.Logs.Log("ERR(0) seen: {}".format(repr(m)))
else:
# Note the error and wait one message to see if it immediately clears
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log("ERR(1) seen: {}".format(repr(xmltodict.parse(message))),
severity=ConsoleWarning)
if ecode == "_3" and eaction == "NE":
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log(
"{} WS stream reported NE error code on WS stream for node{}(Seq:{})".format(self.hubname,
isynd, eseq),
hb=True)
else:
logsupport.Logs.Log(self.hubname + " Strange item in event stream: " + str(m),
severity=ConsoleWarning)
safeprint(message)
except Exception as E:
logsupport.Logs.Log(self.hubname + " Exception in QH on message: ", repr(msav), ' Excp: ', repr(E),
severity=ConsoleWarning)
loopend = time.time()
self.isy.HBWS.Entry('Processing time: {} Done: {}'.format(loopend - loopstart, repr(
message)))
time.sleep(.001) # force thread to give up processor to allow response to time events
if self.isy.version == -1:
self.isy._HubOnline = True
time.sleep(7)
with open('/home/pi/Console/isystream.dmp', 'r') as f:
mes = f.readline() # absorb first
# safeprint("Message1: {}".format(mes))
while True:
mes = f.readline().rstrip('\n')
if mes == '':
# safeprint('Done')
break
# safeprint("Message: {}".format(mes))
on_message(None, mes)
time.sleep(.4)
while True:
time.sleep(500)
return
self.THstate = 'delaying'
logsupport.Logs.Log("{}: WS stream thread {} setup".format(self.hubname, self.QHnum), severity=ConsoleDetail)
if self.delayedstart != 0:
logsupport.Logs.Log(self.hubname + " Delaying Hub restart for probable network reset: ",
str(self.delayedstart), ' seconds')
time.sleep(self.delayedstart)
# websocket.enableTrace(True)
websocket.setdefaulttimeout(30)
if self.isy.addr.startswith('http://'):
wsurl = 'ws://' + self.isy.addr[7:] + '/rest/subscribe'
elif self.isy.addr.startswith('https://'):
wsurl = 'wss://' + self.isy.addr[8:] + '/rest/subscribe'
else:
wsurl = 'ws://' + self.isy.addr + '/rest/subscribe'
import logging
WStrace = open('/home/pi/WStrace', 'w')
print('Open {}'.format(wsurl), file=WStrace)
websocket.enableTrace(True, handler=logging.StreamHandler(stream=WStrace))
while True:
try:
# noinspection PyArgumentList
if self.connectionmode in ('trypolisy', 'polisyworked'):
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
else:
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
subprotocols=['ISYSUB'],
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
break
except AttributeError as e:
logsupport.Logs.Log(self.hubname + " Problem starting WS handler - retrying: ", repr(e))
self.lastheartbeat = time.time()
ws.run_forever(ping_timeout=999, sslopt={"cert_reqs": ssl.CERT_NONE})
self.THstate = 'failed'
self.isy._HubOnline = False
sev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log(self.hubname + " QH Thread " + str(self.QHnum) + " exiting", severity=sev,
tb=False) | elif self.lasterror == 'ISYNetDown':
# likely home network down so wait a bit
logsupport.Logs.Log(self.hubname + ' WS restart for NETUNREACH - delay likely router reboot or down', | random_line_split |
isyeventmonitor.py | import base64
import errno
import random
import ssl
import threading
import time
import copy
import websocket
import xmltodict
import config
import debug
from utils import exitutils
import hubs.isy.isycodes as isycodes
import logsupport
from controlevents import CEvent, PostEvent, ConsoleEvent, PostIfInterested
from hubs.isy.isycodes import EVENT_CTRL, formatwsitem
from logsupport import ConsoleWarning, ConsoleError, ConsoleDetail, ConsoleDetailHigh
from utils.threadmanager import ThreadStartException
from utils.utilfuncs import safeprint
class ISYEMInternalError(Exception):
pass
def BaseAddr(addr):
return None if addr is None else ' '.join(addr.split(' ')[0:-1])
class ISYEventMonitor(object):
def __init__(self, thisISY):
self.connectionmode = 'try994' # trypolisy: trying without subp, polisy: connection worked, try994: trying with subp 994worked.
self.isy = thisISY
self.hubname = thisISY.name
self.QHnum = 1
self.a = base64.b64encode((self.isy.user + ':' + self.isy.password).encode('utf-8'))
self.watchstarttime = time.time()
self.watchlist = []
self.streamid = "unset"
self.seq = 0
self.lastheartbeat = 0
self.hbcount = 0
self.AlertNodes = {}
self.delayedstart = 0
self.longdown = 0
self.WS = None
self.THstate = 'init'
self.querycnt = 0
self.queryqueued = {}
self.LastMsgErr = ('***', -99)
self.isy.Busy = 0
self.lasterror = 'Init'
debug.debugPrint('DaemonCtl', "Queue Handler ", self.QHnum, " started: ", self.watchstarttime)
self.reportablecodes = ["DON", "DFON", "DOF", "DFOF", "ST", "CLISP", "CLISPH", "CLISPC", "CLIFS",
"CLIMD", "CLIHUM", "CLIHCS", "BRT", "DIM"] # "RR", "OL",
def EndWSServer(self):
self.lasterror = "DirectCommError"
self.WS.close()
def RealQuery(self, enode, seq, ndnm):
logsupport.Logs.Log("Queued query attempt (" + str(seq) + ") for: " + ndnm)
time.sleep(105 + random.randint(0, 30)) # allow any in progress query at ISY a chance to clear
if enode not in self.isy.ErrNodes:
logsupport.Logs.Log("Node error cleared without need of query (" + str(seq) + ") for: " + ndnm)
return
logsupport.Logs.Log(self.hubname + ": Attempt query (" + str(seq) + ") for errored node: " + ndnm,
severity=ConsoleWarning)
r = self.isy.try_ISY_comm('query/' + enode, timeout=60, closeonfail=False)
if r == '':
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt failed for node: " + ndnm,
severity=ConsoleWarning)
else:
time.sleep(2)
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt succeeded for node: " + ndnm)
if enode in self.isy.ErrNodes: del self.isy.ErrNodes[enode]
if enode in self.queryqueued: del self.queryqueued[enode]
def DoNodeQuery(self, enode, ndnm):
if enode not in self.queryqueued:
self.querycnt += 1
self.queryqueued[enode] = self.querycnt
t = threading.Thread(name='Query-' + str(self.querycnt) + '-' + enode, target=self.RealQuery, daemon=True,
args=(enode, self.querycnt, ndnm))
t.start()
else:
logsupport.Logs.Log(
self.hubname + ": Query " + str(self.queryqueued[enode]) + " already queued for node: " + ndnm)
def FakeNodeChange(self):
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.HubNodeChange, hub=self.isy.name, node=None, value=-1))
def reinit(self):
self.watchstarttime = time.time()
self.watchlist = []
self.seq = 0
self.hbcount = 0
self.QHnum += 1
def PostStartQHThread(self):
|
def PreRestartQHThread(self):
self.isy._HubOnline = False
self.THstate = 'restarting'
try:
if self.lasterror == 'ISYSocketTimeOut':
logsupport.Logs.Log(self.hubname + '(TimeoutError) Wait for likely router reboot or down',
severity=ConsoleWarning, tb=False)
self.delayedstart = 150
self.reinit()
return
if self.lasterror == 'ISYWSTimeOut':
logsupport.Logs.Log(self.hubname + ' WS restart after surprise close - short delay (15)',
severity=ConsoleWarning)
self.delayedstart = 15
elif self.lasterror == 'ISYNetDown':
# likely home network down so wait a bit
logsupport.Logs.Log(self.hubname + ' WS restart for NETUNREACH - delay likely router reboot or down',
severity=ConsoleWarning)
self.delayedstart = 121
elif self.lasterror == 'ISYClose':
logsupport.Logs.Log(self.hubname + ' Recovering closed WS stream')
self.delayedstart = 2
elif self.lasterror == 'DirectCommError':
logsupport.Logs.Log(self.hubname + ' WS restart because of failed direct communication failure')
self.delayedstart = 90 # probably ISY doing query
elif self.lasterror == 'ISYNoRoute':
logsupport.Logs.Log("{}: Hub probably down (semi) permanently ({})".self.name, self.longdown)
self.delayedstart = 3600 + self.longdown * 1800 # spread checks way out
self.isy._HubOnline = False
self.longdown += 1
else:
logsupport.Logs.Log(self.hubname + ' Unexpected error on WS stream: ', self.lasterror,
severity=ConsoleError, tb=False)
self.delayedstart = 90
except Exception as e:
logsupport.Logs.Log(self.hubname + ' PreRestartQH internal error ', e)
self.reinit()
def QHandler(self):
def on_error(qws, error):
self.isy.HBWS.Entry(repr(error))
self.lasterror = "ISYUnknown"
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
if isinstance(error, websocket.WebSocketConnectionClosedException):
logsupport.Logs.Log(self.hubname + " WS connection closed - attempt to recontact ISY",
severity=reconsev)
self.lasterror = 'ISYClose'
elif isinstance(error, websocket.WebSocketTimeoutException):
logsupport.Logs.Log(self.hubname + " WS connection timed out", severity=ConsoleWarning)
self.lasterror = 'ISYWSTimeOut'
elif isinstance(error, TimeoutError):
logsupport.Logs.Log(self.hubname + " WS socket timed out", severity=ConsoleWarning)
self.lasterror = 'ISYSocketTimeOut'
elif isinstance(error, AttributeError):
logsupport.Logs.Log(self.hubname + " WS library bug", severity=ConsoleWarning)
self.lasterror = 'ISYClose'
elif isinstance(error, OSError):
if error.errno == errno.ENETUNREACH:
logsupport.Logs.Log(self.hubname + " WS network down", severity=ConsoleWarning)
self.lasterror = 'ISYNetDown'
else:
logsupport.Logs.Log(self.hubname + ' WS OS error', repr(error), severity=ConsoleError, tb=False)
self.lasterror = 'ISYNoRoute' # probably semi permanent failure
else:
if self.connectionmode == 'try994':
logsupport.Logs.Log("{}: Connection failed using 994 convention".format(self.hubname))
self.connectionmode = 'trypolisy'
elif self.connectionmode == 'trypolisy':
logsupport.Logs.Log("{}: Connection failed using Polisy convention".format(self.hubname))
self.connectionmode = 'try994'
else:
logsupport.Logs.Log(self.hubname + " Error in WS stream " + str(self.QHnum) + ': ' + repr(error),
severity=ConsoleError,
tb=True)
logsupport.Logs.Log(repr(websocket.WebSocketConnectionClosedException))
self.THstate = 'failed'
debug.debugPrint('DaemonCtl', "Websocket stream error", self.QHnum, repr(error))
qws.close()
# noinspection PyUnusedLocal
def on_close(qws, code, reason):
self.isy.HBWS.Entry("Close")
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log("{} WS stream {} closed: {}:{}".format(self.hubname, self.QHnum, code, reason),
severity=reconsev, hb=True)
debug.debugPrint('DaemonCtl', "ISY Websocket stream closed", str(code), str(reason))
def on_open(qws):
self.isy.HBWS.Entry("Open")
self.THstate = 'starting'
if self.connectionmode == 'try994':
self.connectionmode = '994worked'
logsupport.Logs.Log('{} connection worked using 994 convention'.format(self.isy.name))
elif self.connectionmode == 'trypolisy':
self.connectionmode = 'polisyworked'
logsupport.Logs.Log('{} connection worked using Polisy convention'.format(self.isy.name))
mess = '994' if self.connectionmode == '994worked' else 'Polisy' if self.connectionmode == 'polisyworked' else self.connectionmode
logsupport.Logs.Log("{}: WS stream {} opened ({})".format(self.hubname, self.QHnum, mess))
debug.debugPrint('DaemonCtl', "Websocket stream opened: ", self.QHnum, self.streamid)
self.WS = qws
# noinspection PyUnusedLocal,PyUnboundLocalVariable
def on_message(qws, message):
loopstart = time.time()
self.isy.HBWS.Entry('Message: {}'.format(repr(message)))
# print('Message: {}'.format(message))
try:
m = 'parse error'
m = xmltodict.parse(message)
msav = copy.deepcopy(m)
if debug.dbgStore.GetVal('ISYDump'):
debug.ISYDump("isystream.dmp", message, pretty=False)
# print(m)
if 'SubscriptionResponse' in m:
sr = m['SubscriptionResponse']
if self.streamid != sr['SID']:
self.streamid = sr['SID']
logsupport.Logs.Log("{}: Stream id: {}".format(self.hubname, self.streamid))
elif 'Event' in m:
E = m['Event']
esid = E.pop('@sid', 'No sid')
if self.streamid != esid:
logsupport.Logs.Log(
self.hubname + " Unexpected event stream change: " + self.streamid + "/" + str(esid),
severity=ConsoleError, tb=False)
exitutils.FatalError("WS Stream ID Changed")
eseq = int(E.pop('@seqnum', -99))
if self.seq != eseq:
logsupport.Logs.Log(
self.hubname + " Event mismatch - Expected: " + str(self.seq) + " Got: " + str(eseq),
severity=ConsoleWarning)
raise ISYEMInternalError
else:
self.seq += 1
ecode = E.pop('control', 'Missing control')
if ecode in EVENT_CTRL:
prcode = EVENT_CTRL[ecode]
else:
prcode = "**" + ecode + "**"
eaction = E.pop('action', 'No action')
enode = E.pop('node', 'No node')
eInfo = E.pop('eventInfo', 'No EventInfo')
if isinstance(eaction, dict):
debug.debugPrint('DaemonStream', "V5 stream - pull up action value: ", eaction)
eaction = eaction["#text"] # the new xmltodict will return as data['action']['#text']
if enode in self.isy.NodesByAddr: # get the node to set if any
N = self.isy.NodesByAddr[enode]
else:
N = None
if ecode == 'ST': # update cached state first before posting alerts or race
if isinstance(N, isycodes.ThermType):
N.cur = isycodes.NormalizeState(eaction)
elif N is not None:
oldstate = N.devState
N.devState = isycodes.NormalizeState(eaction)
logsupport.Logs.Log('ISYchg', 'ISY Node: ', N.name, ' state change from: ', oldstate,
' to: ', N.devState, severity=ConsoleDetailHigh)
if (oldstate == N.devState) and self.THstate == 'running':
logsupport.Logs.Log(self.hubname +
" State report with no change: " + N.name + ' state: ' + str(
oldstate))
else:
logsupport.Logs.Log(self.hubname +
" Status change for " + N.name + '(' + str(enode) + ') to ' + str(
N.devState), severity=ConsoleDetailHigh)
# status changed to post to any alerts that want it
# since alerts can only react to the state of a node we check only on an ST message
# screens on the other hand may need to know about other actions (thermostat e.g.)
# so they get checked below under reportablecodes
# if I check alerts there I get extra invocations for the DON and DOF e.g. which while not
# harmful are anomolous
if enode in self.AlertNodes:
# alert node changed
debug.debugPrint('DaemonCtl', 'ISY reports change(alert):',
self.isy.NodesByAddr[enode].name)
for a in self.AlertNodes[enode]:
if self.THstate != 'running':
# this is a restart or initial dump so indicate upwards to avoid misleading log entry
if a.state == 'Armed':
a.state = 'Init'
logsupport.Logs.Log(self.hubname + " Node alert fired: " + str(a),
severity=ConsoleDetail)
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.ISYAlert, hub=self.isy.name, node=enode,
value=isycodes.NormalizeState(eaction), alert=a))
elif ecode == 'CLIHCS' and isinstance(N, isycodes.ThermType):
N.statecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIFS' and isinstance(N, isycodes.ThermType):
N.fancode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIMD' and isinstance(N, isycodes.ThermType):
N.modecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIHUM' and isinstance(N, isycodes.ThermType):
N.hum = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPH' and isinstance(N, isycodes.ThermType):
N.setlow = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPC' and isinstance(N, isycodes.ThermType):
N.sethigh = isycodes.NormalizeState(eaction)
if ecode in self.reportablecodes:
# Node change report
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
"Status update in stream: ",
eseq, ":",
prcode, " : ", enode, " : ", eInfo, " : ", eaction)
# logsupport.Logs.Log('reportable event '+str(ecode)+' for '+str(enode)+' action '+str(eaction))
PostIfInterested(self.isy, enode, isycodes.NormalizeState(eaction))
elif (prcode == 'Trigger') and (eaction == '6'):
vinfo = eInfo['var']
vartype = int(vinfo['@type'])
varid = int(vinfo['@id'])
varval = int(vinfo['val'])
debug.debugPrint('DaemonCtl', 'Var change: ', self.isy.Vars.GetNameFromAttr((vartype, varid)),
' set to ', varval)
debug.debugPrint('DaemonCtl', 'Var change:', ('Unkn', 'Integer', 'State')[vartype],
' variable ', varid,
' set to ', varval)
try:
self.isy.Vars.SetValByAttr((vartype, varid), varval, modifier=True)
except KeyError:
logsupport.Logs.Log(
"Unknown variable from " + self.hubname + " - probably added since startup",
severity=ConsoleWarning)
elif prcode == 'Heartbeat':
if self.hbcount > 0:
# wait 2 heartbeats
self.THstate = 'running'
self.lastheartbeat = time.time()
self.hbcount += 1
elif prcode == 'Billing':
self.THstate = 'running'
else:
pass # handle any other?
efmtact = E.pop('fmtAct', 'v4stream')
efmtnm = E.pop('fmtName', 'noName')
if E:
lev = ConsoleDetailHigh if str(
enode) in self.isy.V3Nodes else ConsoleWarning # supress to detail if it is a V3 node
logsupport.Logs.Log(
self.hubname + " Extra info in event: " + str(ecode) + '/' + str(prcode) + '/' + str(
eaction) + '/' + str(enode) + '/' + str(eInfo) + ' ' + str(E), severity=lev)
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
formatwsitem(esid, eseq, ecode, eaction, enode, eInfo, E, self.isy))
try:
isynd = self.isy.NodesByAddr[enode].name
except (KeyError, AttributeError):
isynd = enode
if ecode == '_5':
now = time.time()
if str(eaction) == '1':
# logsupport.Logs.Log(self.hubname, ' went busy')
self.isy.Busy = now
elif str(eaction) == '0':
if self.isy.Busy != 0:
# logsupport.Logs.Log(self.hubname, " cleared busy")
if now - self.isy.Busy > 10:
logsupport.Logs.Log(
"{}: busy for {:.4f} seconds".format(self.hubname, now - self.isy.Busy))
self.isy.Busy = 0
else:
logsupport.Logs.Log(self.hubname, " reported stand-alone not busy")
else:
logsupport.Logs.Log(self.hubname, " reported System Status: ", str(eaction))
if ecode == "ST" or (ecode == "_3" and eaction == "CE"):
if self.LastMsgErr[0] != '***' and (
BaseAddr(self.LastMsgErr[0]) == BaseAddr(enode)):
# ERR msg followed by clearing - ISY weirdness?
logsupport.Logs.Log(
"{} reported and immediately cleared error for node: {} ({}) (seq:{}/{})".format(
self.hubname,
isynd, BaseAddr(self.LastMsgErr[0]), self.LastMsgErr[1], eseq),
severity=ConsoleWarning, hb=True)
self.LastMsgErr = ('***', -99)
elif enode in self.isy.ErrNodes:
logsupport.Logs.Log("{} cleared comm error for node: {}".format(self.hubname, isynd))
if enode in self.isy.ErrNodes:
# logsupport.Logs.Log("Query thread still running")
del self.isy.ErrNodes[enode]
if self.LastMsgErr != ('***', -99):
# previous message was ERR and wasn't immediately cleared
try:
isyerrnd = self.isy.NodesByAddr[self.LastMsgErr[0]].name
except (KeyError, AttributeError):
isyerrnd = self.LastMsgErr[0]
logsupport.Logs.Log(
"{} WS stream shows comm error for node: {}(Seq:{})".format(self.hubname, isyerrnd,
self.LastMsgErr[1]),
severity=ConsoleWarning, hb=True)
if self.LastMsgErr[0] not in self.isy.ErrNodes:
self.isy.ErrNodes[self.LastMsgErr[0]] = eseq
self.DoNodeQuery(self.LastMsgErr[0], isyerrnd)
self.LastMsgErr = ('***', -99)
if ecode == "ERR":
if str(eaction) == "0":
pass
# logsupport.Logs.Log("ERR(0) seen: {}".format(repr(m)))
else:
# Note the error and wait one message to see if it immediately clears
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log("ERR(1) seen: {}".format(repr(xmltodict.parse(message))),
severity=ConsoleWarning)
if ecode == "_3" and eaction == "NE":
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log(
"{} WS stream reported NE error code on WS stream for node{}(Seq:{})".format(self.hubname,
isynd, eseq),
hb=True)
else:
logsupport.Logs.Log(self.hubname + " Strange item in event stream: " + str(m),
severity=ConsoleWarning)
safeprint(message)
except Exception as E:
logsupport.Logs.Log(self.hubname + " Exception in QH on message: ", repr(msav), ' Excp: ', repr(E),
severity=ConsoleWarning)
loopend = time.time()
self.isy.HBWS.Entry('Processing time: {} Done: {}'.format(loopend - loopstart, repr(
message)))
time.sleep(.001) # force thread to give up processor to allow response to time events
if self.isy.version == -1:
self.isy._HubOnline = True
time.sleep(7)
with open('/home/pi/Console/isystream.dmp', 'r') as f:
mes = f.readline() # absorb first
# safeprint("Message1: {}".format(mes))
while True:
mes = f.readline().rstrip('\n')
if mes == '':
# safeprint('Done')
break
# safeprint("Message: {}".format(mes))
on_message(None, mes)
time.sleep(.4)
while True:
time.sleep(500)
return
self.THstate = 'delaying'
logsupport.Logs.Log("{}: WS stream thread {} setup".format(self.hubname, self.QHnum), severity=ConsoleDetail)
if self.delayedstart != 0:
logsupport.Logs.Log(self.hubname + " Delaying Hub restart for probable network reset: ",
str(self.delayedstart), ' seconds')
time.sleep(self.delayedstart)
# websocket.enableTrace(True)
websocket.setdefaulttimeout(30)
if self.isy.addr.startswith('http://'):
wsurl = 'ws://' + self.isy.addr[7:] + '/rest/subscribe'
elif self.isy.addr.startswith('https://'):
wsurl = 'wss://' + self.isy.addr[8:] + '/rest/subscribe'
else:
wsurl = 'ws://' + self.isy.addr + '/rest/subscribe'
import logging
WStrace = open('/home/pi/WStrace', 'w')
print('Open {}'.format(wsurl), file=WStrace)
websocket.enableTrace(True, handler=logging.StreamHandler(stream=WStrace))
while True:
try:
# noinspection PyArgumentList
if self.connectionmode in ('trypolisy', 'polisyworked'):
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
else:
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
subprotocols=['ISYSUB'],
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
break
except AttributeError as e:
logsupport.Logs.Log(self.hubname + " Problem starting WS handler - retrying: ", repr(e))
self.lastheartbeat = time.time()
ws.run_forever(ping_timeout=999, sslopt={"cert_reqs": ssl.CERT_NONE})
self.THstate = 'failed'
self.isy._HubOnline = False
sev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log(self.hubname + " QH Thread " + str(self.QHnum) + " exiting", severity=sev,
tb=False)
| if self.isy.version == -1:
# test mode
return
hungcount = 40
while self.THstate == 'restarting':
logsupport.Logs.Log(self.hubname + " Waiting thread start")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
while self.THstate == 'delaying':
time.sleep(1)
hungcount = 60
while self.THstate == 'starting':
logsupport.Logs.Log(self.hubname + ": Waiting initial status dump")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
if self.THstate == 'running':
self.isy._HubOnline = True
self.isy.Vars.CheckValsUpToDate(reload=True)
logsupport.Logs.Log(self.hubname + ": Initial status streamed ", self.seq, " items and vars updated")
elif self.THstate == 'failed':
logsupport.Logs.Log(self.hubname + " Failed Thread Restart", severity=ConsoleWarning)
else:
logsupport.Logs.Log(self.hubname + " Unknown ISY QH Thread state") | identifier_body |
isyeventmonitor.py | import base64
import errno
import random
import ssl
import threading
import time
import copy
import websocket
import xmltodict
import config
import debug
from utils import exitutils
import hubs.isy.isycodes as isycodes
import logsupport
from controlevents import CEvent, PostEvent, ConsoleEvent, PostIfInterested
from hubs.isy.isycodes import EVENT_CTRL, formatwsitem
from logsupport import ConsoleWarning, ConsoleError, ConsoleDetail, ConsoleDetailHigh
from utils.threadmanager import ThreadStartException
from utils.utilfuncs import safeprint
class ISYEMInternalError(Exception):
pass
def BaseAddr(addr):
return None if addr is None else ' '.join(addr.split(' ')[0:-1])
class ISYEventMonitor(object):
def __init__(self, thisISY):
self.connectionmode = 'try994' # trypolisy: trying without subp, polisy: connection worked, try994: trying with subp 994worked.
self.isy = thisISY
self.hubname = thisISY.name
self.QHnum = 1
self.a = base64.b64encode((self.isy.user + ':' + self.isy.password).encode('utf-8'))
self.watchstarttime = time.time()
self.watchlist = []
self.streamid = "unset"
self.seq = 0
self.lastheartbeat = 0
self.hbcount = 0
self.AlertNodes = {}
self.delayedstart = 0
self.longdown = 0
self.WS = None
self.THstate = 'init'
self.querycnt = 0
self.queryqueued = {}
self.LastMsgErr = ('***', -99)
self.isy.Busy = 0
self.lasterror = 'Init'
debug.debugPrint('DaemonCtl', "Queue Handler ", self.QHnum, " started: ", self.watchstarttime)
self.reportablecodes = ["DON", "DFON", "DOF", "DFOF", "ST", "CLISP", "CLISPH", "CLISPC", "CLIFS",
"CLIMD", "CLIHUM", "CLIHCS", "BRT", "DIM"] # "RR", "OL",
def EndWSServer(self):
self.lasterror = "DirectCommError"
self.WS.close()
def RealQuery(self, enode, seq, ndnm):
logsupport.Logs.Log("Queued query attempt (" + str(seq) + ") for: " + ndnm)
time.sleep(105 + random.randint(0, 30)) # allow any in progress query at ISY a chance to clear
if enode not in self.isy.ErrNodes:
logsupport.Logs.Log("Node error cleared without need of query (" + str(seq) + ") for: " + ndnm)
return
logsupport.Logs.Log(self.hubname + ": Attempt query (" + str(seq) + ") for errored node: " + ndnm,
severity=ConsoleWarning)
r = self.isy.try_ISY_comm('query/' + enode, timeout=60, closeonfail=False)
if r == '':
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt failed for node: " + ndnm,
severity=ConsoleWarning)
else:
time.sleep(2)
logsupport.Logs.Log(self.hubname + ": Query (" + str(seq) + ") attempt succeeded for node: " + ndnm)
if enode in self.isy.ErrNodes: del self.isy.ErrNodes[enode]
if enode in self.queryqueued: del self.queryqueued[enode]
def DoNodeQuery(self, enode, ndnm):
if enode not in self.queryqueued:
self.querycnt += 1
self.queryqueued[enode] = self.querycnt
t = threading.Thread(name='Query-' + str(self.querycnt) + '-' + enode, target=self.RealQuery, daemon=True,
args=(enode, self.querycnt, ndnm))
t.start()
else:
logsupport.Logs.Log(
self.hubname + ": Query " + str(self.queryqueued[enode]) + " already queued for node: " + ndnm)
def | (self):
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.HubNodeChange, hub=self.isy.name, node=None, value=-1))
def reinit(self):
self.watchstarttime = time.time()
self.watchlist = []
self.seq = 0
self.hbcount = 0
self.QHnum += 1
def PostStartQHThread(self):
if self.isy.version == -1:
# test mode
return
hungcount = 40
while self.THstate == 'restarting':
logsupport.Logs.Log(self.hubname + " Waiting thread start")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
while self.THstate == 'delaying':
time.sleep(1)
hungcount = 60
while self.THstate == 'starting':
logsupport.Logs.Log(self.hubname + ": Waiting initial status dump")
time.sleep(2)
hungcount -= 1
if hungcount < 0: raise ThreadStartException
if self.THstate == 'running':
self.isy._HubOnline = True
self.isy.Vars.CheckValsUpToDate(reload=True)
logsupport.Logs.Log(self.hubname + ": Initial status streamed ", self.seq, " items and vars updated")
elif self.THstate == 'failed':
logsupport.Logs.Log(self.hubname + " Failed Thread Restart", severity=ConsoleWarning)
else:
logsupport.Logs.Log(self.hubname + " Unknown ISY QH Thread state")
def PreRestartQHThread(self):
self.isy._HubOnline = False
self.THstate = 'restarting'
try:
if self.lasterror == 'ISYSocketTimeOut':
logsupport.Logs.Log(self.hubname + '(TimeoutError) Wait for likely router reboot or down',
severity=ConsoleWarning, tb=False)
self.delayedstart = 150
self.reinit()
return
if self.lasterror == 'ISYWSTimeOut':
logsupport.Logs.Log(self.hubname + ' WS restart after surprise close - short delay (15)',
severity=ConsoleWarning)
self.delayedstart = 15
elif self.lasterror == 'ISYNetDown':
# likely home network down so wait a bit
logsupport.Logs.Log(self.hubname + ' WS restart for NETUNREACH - delay likely router reboot or down',
severity=ConsoleWarning)
self.delayedstart = 121
elif self.lasterror == 'ISYClose':
logsupport.Logs.Log(self.hubname + ' Recovering closed WS stream')
self.delayedstart = 2
elif self.lasterror == 'DirectCommError':
logsupport.Logs.Log(self.hubname + ' WS restart because of failed direct communication failure')
self.delayedstart = 90 # probably ISY doing query
elif self.lasterror == 'ISYNoRoute':
logsupport.Logs.Log("{}: Hub probably down (semi) permanently ({})".self.name, self.longdown)
self.delayedstart = 3600 + self.longdown * 1800 # spread checks way out
self.isy._HubOnline = False
self.longdown += 1
else:
logsupport.Logs.Log(self.hubname + ' Unexpected error on WS stream: ', self.lasterror,
severity=ConsoleError, tb=False)
self.delayedstart = 90
except Exception as e:
logsupport.Logs.Log(self.hubname + ' PreRestartQH internal error ', e)
self.reinit()
def QHandler(self):
def on_error(qws, error):
self.isy.HBWS.Entry(repr(error))
self.lasterror = "ISYUnknown"
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
if isinstance(error, websocket.WebSocketConnectionClosedException):
logsupport.Logs.Log(self.hubname + " WS connection closed - attempt to recontact ISY",
severity=reconsev)
self.lasterror = 'ISYClose'
elif isinstance(error, websocket.WebSocketTimeoutException):
logsupport.Logs.Log(self.hubname + " WS connection timed out", severity=ConsoleWarning)
self.lasterror = 'ISYWSTimeOut'
elif isinstance(error, TimeoutError):
logsupport.Logs.Log(self.hubname + " WS socket timed out", severity=ConsoleWarning)
self.lasterror = 'ISYSocketTimeOut'
elif isinstance(error, AttributeError):
logsupport.Logs.Log(self.hubname + " WS library bug", severity=ConsoleWarning)
self.lasterror = 'ISYClose'
elif isinstance(error, OSError):
if error.errno == errno.ENETUNREACH:
logsupport.Logs.Log(self.hubname + " WS network down", severity=ConsoleWarning)
self.lasterror = 'ISYNetDown'
else:
logsupport.Logs.Log(self.hubname + ' WS OS error', repr(error), severity=ConsoleError, tb=False)
self.lasterror = 'ISYNoRoute' # probably semi permanent failure
else:
if self.connectionmode == 'try994':
logsupport.Logs.Log("{}: Connection failed using 994 convention".format(self.hubname))
self.connectionmode = 'trypolisy'
elif self.connectionmode == 'trypolisy':
logsupport.Logs.Log("{}: Connection failed using Polisy convention".format(self.hubname))
self.connectionmode = 'try994'
else:
logsupport.Logs.Log(self.hubname + " Error in WS stream " + str(self.QHnum) + ': ' + repr(error),
severity=ConsoleError,
tb=True)
logsupport.Logs.Log(repr(websocket.WebSocketConnectionClosedException))
self.THstate = 'failed'
debug.debugPrint('DaemonCtl', "Websocket stream error", self.QHnum, repr(error))
qws.close()
# noinspection PyUnusedLocal
def on_close(qws, code, reason):
self.isy.HBWS.Entry("Close")
reconsev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log("{} WS stream {} closed: {}:{}".format(self.hubname, self.QHnum, code, reason),
severity=reconsev, hb=True)
debug.debugPrint('DaemonCtl', "ISY Websocket stream closed", str(code), str(reason))
def on_open(qws):
self.isy.HBWS.Entry("Open")
self.THstate = 'starting'
if self.connectionmode == 'try994':
self.connectionmode = '994worked'
logsupport.Logs.Log('{} connection worked using 994 convention'.format(self.isy.name))
elif self.connectionmode == 'trypolisy':
self.connectionmode = 'polisyworked'
logsupport.Logs.Log('{} connection worked using Polisy convention'.format(self.isy.name))
mess = '994' if self.connectionmode == '994worked' else 'Polisy' if self.connectionmode == 'polisyworked' else self.connectionmode
logsupport.Logs.Log("{}: WS stream {} opened ({})".format(self.hubname, self.QHnum, mess))
debug.debugPrint('DaemonCtl', "Websocket stream opened: ", self.QHnum, self.streamid)
self.WS = qws
# noinspection PyUnusedLocal,PyUnboundLocalVariable
def on_message(qws, message):
loopstart = time.time()
self.isy.HBWS.Entry('Message: {}'.format(repr(message)))
# print('Message: {}'.format(message))
try:
m = 'parse error'
m = xmltodict.parse(message)
msav = copy.deepcopy(m)
if debug.dbgStore.GetVal('ISYDump'):
debug.ISYDump("isystream.dmp", message, pretty=False)
# print(m)
if 'SubscriptionResponse' in m:
sr = m['SubscriptionResponse']
if self.streamid != sr['SID']:
self.streamid = sr['SID']
logsupport.Logs.Log("{}: Stream id: {}".format(self.hubname, self.streamid))
elif 'Event' in m:
E = m['Event']
esid = E.pop('@sid', 'No sid')
if self.streamid != esid:
logsupport.Logs.Log(
self.hubname + " Unexpected event stream change: " + self.streamid + "/" + str(esid),
severity=ConsoleError, tb=False)
exitutils.FatalError("WS Stream ID Changed")
eseq = int(E.pop('@seqnum', -99))
if self.seq != eseq:
logsupport.Logs.Log(
self.hubname + " Event mismatch - Expected: " + str(self.seq) + " Got: " + str(eseq),
severity=ConsoleWarning)
raise ISYEMInternalError
else:
self.seq += 1
ecode = E.pop('control', 'Missing control')
if ecode in EVENT_CTRL:
prcode = EVENT_CTRL[ecode]
else:
prcode = "**" + ecode + "**"
eaction = E.pop('action', 'No action')
enode = E.pop('node', 'No node')
eInfo = E.pop('eventInfo', 'No EventInfo')
if isinstance(eaction, dict):
debug.debugPrint('DaemonStream', "V5 stream - pull up action value: ", eaction)
eaction = eaction["#text"] # the new xmltodict will return as data['action']['#text']
if enode in self.isy.NodesByAddr: # get the node to set if any
N = self.isy.NodesByAddr[enode]
else:
N = None
if ecode == 'ST': # update cached state first before posting alerts or race
if isinstance(N, isycodes.ThermType):
N.cur = isycodes.NormalizeState(eaction)
elif N is not None:
oldstate = N.devState
N.devState = isycodes.NormalizeState(eaction)
logsupport.Logs.Log('ISYchg', 'ISY Node: ', N.name, ' state change from: ', oldstate,
' to: ', N.devState, severity=ConsoleDetailHigh)
if (oldstate == N.devState) and self.THstate == 'running':
logsupport.Logs.Log(self.hubname +
" State report with no change: " + N.name + ' state: ' + str(
oldstate))
else:
logsupport.Logs.Log(self.hubname +
" Status change for " + N.name + '(' + str(enode) + ') to ' + str(
N.devState), severity=ConsoleDetailHigh)
# status changed to post to any alerts that want it
# since alerts can only react to the state of a node we check only on an ST message
# screens on the other hand may need to know about other actions (thermostat e.g.)
# so they get checked below under reportablecodes
# if I check alerts there I get extra invocations for the DON and DOF e.g. which while not
# harmful are anomolous
if enode in self.AlertNodes:
# alert node changed
debug.debugPrint('DaemonCtl', 'ISY reports change(alert):',
self.isy.NodesByAddr[enode].name)
for a in self.AlertNodes[enode]:
if self.THstate != 'running':
# this is a restart or initial dump so indicate upwards to avoid misleading log entry
if a.state == 'Armed':
a.state = 'Init'
logsupport.Logs.Log(self.hubname + " Node alert fired: " + str(a),
severity=ConsoleDetail)
# noinspection PyArgumentList
PostEvent(ConsoleEvent(CEvent.ISYAlert, hub=self.isy.name, node=enode,
value=isycodes.NormalizeState(eaction), alert=a))
elif ecode == 'CLIHCS' and isinstance(N, isycodes.ThermType):
N.statecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIFS' and isinstance(N, isycodes.ThermType):
N.fancode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIMD' and isinstance(N, isycodes.ThermType):
N.modecode = isycodes.NormalizeState(eaction)
elif ecode == 'CLIHUM' and isinstance(N, isycodes.ThermType):
N.hum = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPH' and isinstance(N, isycodes.ThermType):
N.setlow = isycodes.NormalizeState(eaction)
elif ecode == 'CLISPC' and isinstance(N, isycodes.ThermType):
N.sethigh = isycodes.NormalizeState(eaction)
if ecode in self.reportablecodes:
# Node change report
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
"Status update in stream: ",
eseq, ":",
prcode, " : ", enode, " : ", eInfo, " : ", eaction)
# logsupport.Logs.Log('reportable event '+str(ecode)+' for '+str(enode)+' action '+str(eaction))
PostIfInterested(self.isy, enode, isycodes.NormalizeState(eaction))
elif (prcode == 'Trigger') and (eaction == '6'):
vinfo = eInfo['var']
vartype = int(vinfo['@type'])
varid = int(vinfo['@id'])
varval = int(vinfo['val'])
debug.debugPrint('DaemonCtl', 'Var change: ', self.isy.Vars.GetNameFromAttr((vartype, varid)),
' set to ', varval)
debug.debugPrint('DaemonCtl', 'Var change:', ('Unkn', 'Integer', 'State')[vartype],
' variable ', varid,
' set to ', varval)
try:
self.isy.Vars.SetValByAttr((vartype, varid), varval, modifier=True)
except KeyError:
logsupport.Logs.Log(
"Unknown variable from " + self.hubname + " - probably added since startup",
severity=ConsoleWarning)
elif prcode == 'Heartbeat':
if self.hbcount > 0:
# wait 2 heartbeats
self.THstate = 'running'
self.lastheartbeat = time.time()
self.hbcount += 1
elif prcode == 'Billing':
self.THstate = 'running'
else:
pass # handle any other?
efmtact = E.pop('fmtAct', 'v4stream')
efmtnm = E.pop('fmtName', 'noName')
if E:
lev = ConsoleDetailHigh if str(
enode) in self.isy.V3Nodes else ConsoleWarning # supress to detail if it is a V3 node
logsupport.Logs.Log(
self.hubname + " Extra info in event: " + str(ecode) + '/' + str(prcode) + '/' + str(
eaction) + '/' + str(enode) + '/' + str(eInfo) + ' ' + str(E), severity=lev)
debug.debugPrint('DaemonStream', time.time() - config.sysStore.ConsoleStartTime,
formatwsitem(esid, eseq, ecode, eaction, enode, eInfo, E, self.isy))
try:
isynd = self.isy.NodesByAddr[enode].name
except (KeyError, AttributeError):
isynd = enode
if ecode == '_5':
now = time.time()
if str(eaction) == '1':
# logsupport.Logs.Log(self.hubname, ' went busy')
self.isy.Busy = now
elif str(eaction) == '0':
if self.isy.Busy != 0:
# logsupport.Logs.Log(self.hubname, " cleared busy")
if now - self.isy.Busy > 10:
logsupport.Logs.Log(
"{}: busy for {:.4f} seconds".format(self.hubname, now - self.isy.Busy))
self.isy.Busy = 0
else:
logsupport.Logs.Log(self.hubname, " reported stand-alone not busy")
else:
logsupport.Logs.Log(self.hubname, " reported System Status: ", str(eaction))
if ecode == "ST" or (ecode == "_3" and eaction == "CE"):
if self.LastMsgErr[0] != '***' and (
BaseAddr(self.LastMsgErr[0]) == BaseAddr(enode)):
# ERR msg followed by clearing - ISY weirdness?
logsupport.Logs.Log(
"{} reported and immediately cleared error for node: {} ({}) (seq:{}/{})".format(
self.hubname,
isynd, BaseAddr(self.LastMsgErr[0]), self.LastMsgErr[1], eseq),
severity=ConsoleWarning, hb=True)
self.LastMsgErr = ('***', -99)
elif enode in self.isy.ErrNodes:
logsupport.Logs.Log("{} cleared comm error for node: {}".format(self.hubname, isynd))
if enode in self.isy.ErrNodes:
# logsupport.Logs.Log("Query thread still running")
del self.isy.ErrNodes[enode]
if self.LastMsgErr != ('***', -99):
# previous message was ERR and wasn't immediately cleared
try:
isyerrnd = self.isy.NodesByAddr[self.LastMsgErr[0]].name
except (KeyError, AttributeError):
isyerrnd = self.LastMsgErr[0]
logsupport.Logs.Log(
"{} WS stream shows comm error for node: {}(Seq:{})".format(self.hubname, isyerrnd,
self.LastMsgErr[1]),
severity=ConsoleWarning, hb=True)
if self.LastMsgErr[0] not in self.isy.ErrNodes:
self.isy.ErrNodes[self.LastMsgErr[0]] = eseq
self.DoNodeQuery(self.LastMsgErr[0], isyerrnd)
self.LastMsgErr = ('***', -99)
if ecode == "ERR":
if str(eaction) == "0":
pass
# logsupport.Logs.Log("ERR(0) seen: {}".format(repr(m)))
else:
# Note the error and wait one message to see if it immediately clears
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log("ERR(1) seen: {}".format(repr(xmltodict.parse(message))),
severity=ConsoleWarning)
if ecode == "_3" and eaction == "NE":
self.LastMsgErr = (enode, eseq)
logsupport.Logs.Log(
"{} WS stream reported NE error code on WS stream for node{}(Seq:{})".format(self.hubname,
isynd, eseq),
hb=True)
else:
logsupport.Logs.Log(self.hubname + " Strange item in event stream: " + str(m),
severity=ConsoleWarning)
safeprint(message)
except Exception as E:
logsupport.Logs.Log(self.hubname + " Exception in QH on message: ", repr(msav), ' Excp: ', repr(E),
severity=ConsoleWarning)
loopend = time.time()
self.isy.HBWS.Entry('Processing time: {} Done: {}'.format(loopend - loopstart, repr(
message)))
time.sleep(.001) # force thread to give up processor to allow response to time events
if self.isy.version == -1:
self.isy._HubOnline = True
time.sleep(7)
with open('/home/pi/Console/isystream.dmp', 'r') as f:
mes = f.readline() # absorb first
# safeprint("Message1: {}".format(mes))
while True:
mes = f.readline().rstrip('\n')
if mes == '':
# safeprint('Done')
break
# safeprint("Message: {}".format(mes))
on_message(None, mes)
time.sleep(.4)
while True:
time.sleep(500)
return
self.THstate = 'delaying'
logsupport.Logs.Log("{}: WS stream thread {} setup".format(self.hubname, self.QHnum), severity=ConsoleDetail)
if self.delayedstart != 0:
logsupport.Logs.Log(self.hubname + " Delaying Hub restart for probable network reset: ",
str(self.delayedstart), ' seconds')
time.sleep(self.delayedstart)
# websocket.enableTrace(True)
websocket.setdefaulttimeout(30)
if self.isy.addr.startswith('http://'):
wsurl = 'ws://' + self.isy.addr[7:] + '/rest/subscribe'
elif self.isy.addr.startswith('https://'):
wsurl = 'wss://' + self.isy.addr[8:] + '/rest/subscribe'
else:
wsurl = 'ws://' + self.isy.addr + '/rest/subscribe'
import logging
WStrace = open('/home/pi/WStrace', 'w')
print('Open {}'.format(wsurl), file=WStrace)
websocket.enableTrace(True, handler=logging.StreamHandler(stream=WStrace))
while True:
try:
# noinspection PyArgumentList
if self.connectionmode in ('trypolisy', 'polisyworked'):
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
else:
ws = websocket.WebSocketApp(wsurl, on_message=on_message,
on_error=on_error,
subprotocols=['ISYSUB'],
on_close=on_close, on_open=on_open,
header={'Authorization': 'Basic ' + self.a.decode('ascii')})
break
except AttributeError as e:
logsupport.Logs.Log(self.hubname + " Problem starting WS handler - retrying: ", repr(e))
self.lastheartbeat = time.time()
ws.run_forever(ping_timeout=999, sslopt={"cert_reqs": ssl.CERT_NONE})
self.THstate = 'failed'
self.isy._HubOnline = False
sev = ConsoleWarning if config.sysStore.ErrLogReconnects else logsupport.ConsoleInfo
logsupport.Logs.Log(self.hubname + " QH Thread " + str(self.QHnum) + " exiting", severity=sev,
tb=False)
| FakeNodeChange | identifier_name |
errors.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::local_data;
use cssparser::ast::{SyntaxError, SourceLocation};
pub struct ErrorLoggerIterator<I>(I);
impl<T, I: Iterator<Result<T, SyntaxError>>> Iterator<T> for ErrorLoggerIterator<I> {
fn next(&mut self) -> Option<T> {
let ErrorLoggerIterator(ref mut this) = *self;
loop {
match this.next() {
Some(Ok(v)) => return Some(v),
Some(Err(error)) => log_css_error(error.location, format!("{:?}", error.reason)),
None => return None,
}
}
}
}
// FIXME: go back to `()` instead of `bool` after upgrading Rust
// past 898669c4e203ae91e2048fb6c0f8591c867bccc6
// Using bool is a work-around for https://github.com/mozilla/rust/issues/13322
local_data_key!(silence_errors: bool)
pub fn log_css_error(location: SourceLocation, message: &str) {
// TODO eventually this will got into a "web console" or something.
if local_data::get(silence_errors, |silenced| silenced.is_none()) {
error!("{:u}:{:u} {:s}", location.line, location.column, message)
}
}
pub fn with_errors_silenced<T>(f: || -> T) -> T | {
local_data::set(silence_errors, true);
let result = f();
local_data::pop(silence_errors);
result
} | identifier_body | |
errors.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::local_data;
use cssparser::ast::{SyntaxError, SourceLocation};
pub struct ErrorLoggerIterator<I>(I);
impl<T, I: Iterator<Result<T, SyntaxError>>> Iterator<T> for ErrorLoggerIterator<I> {
fn next(&mut self) -> Option<T> {
let ErrorLoggerIterator(ref mut this) = *self;
loop {
match this.next() {
Some(Ok(v)) => return Some(v),
Some(Err(error)) => log_css_error(error.location, format!("{:?}", error.reason)),
None => return None,
}
}
}
}
// FIXME: go back to `()` instead of `bool` after upgrading Rust
// past 898669c4e203ae91e2048fb6c0f8591c867bccc6
// Using bool is a work-around for https://github.com/mozilla/rust/issues/13322
local_data_key!(silence_errors: bool)
pub fn log_css_error(location: SourceLocation, message: &str) {
// TODO eventually this will got into a "web console" or something.
if local_data::get(silence_errors, |silenced| silenced.is_none()) {
error!("{:u}:{:u} {:s}", location.line, location.column, message)
}
}
pub fn | <T>(f: || -> T) -> T {
local_data::set(silence_errors, true);
let result = f();
local_data::pop(silence_errors);
result
}
| with_errors_silenced | identifier_name |
errors.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use std::local_data;
use cssparser::ast::{SyntaxError, SourceLocation};
pub struct ErrorLoggerIterator<I>(I);
impl<T, I: Iterator<Result<T, SyntaxError>>> Iterator<T> for ErrorLoggerIterator<I> {
fn next(&mut self) -> Option<T> {
let ErrorLoggerIterator(ref mut this) = *self;
loop {
match this.next() { | }
}
}
}
// FIXME: go back to `()` instead of `bool` after upgrading Rust
// past 898669c4e203ae91e2048fb6c0f8591c867bccc6
// Using bool is a work-around for https://github.com/mozilla/rust/issues/13322
local_data_key!(silence_errors: bool)
pub fn log_css_error(location: SourceLocation, message: &str) {
// TODO eventually this will got into a "web console" or something.
if local_data::get(silence_errors, |silenced| silenced.is_none()) {
error!("{:u}:{:u} {:s}", location.line, location.column, message)
}
}
pub fn with_errors_silenced<T>(f: || -> T) -> T {
local_data::set(silence_errors, true);
let result = f();
local_data::pop(silence_errors);
result
} | Some(Ok(v)) => return Some(v),
Some(Err(error)) => log_css_error(error.location, format!("{:?}", error.reason)),
None => return None, | random_line_split |
users.js | "use strict";
var user = require('../../user'),
meta = require('../../meta');
var usersController = {};
usersController.search = function(req, res, next) {
res.render('admin/manage/users', {
search_display: '',
loadmore_display: 'hide',
users: []
});
};
usersController.sortByPosts = function(req, res, next) {
getUsers('users:postcount', req, res, next);
};
usersController.sortByReputation = function(req, res, next) {
getUsers('users:reputation', req, res, next);
};
usersController.sortByJoinDate = function(req, res, next) {
getUsers('users:joindate', req, res, next);
};
usersController.banned = function(req, res, next) {
getUsers('users:banned', req, res, next);
};
function getUsers(set, req, res, next) |
usersController.getCSV = function(req, res, next) {
user.getUsersCSV(function(err, data) {
if (err) {
return next(err);
}
res.attachment('users.csv');
res.setHeader('Content-Type', 'text/csv');
res.end(data);
});
};
module.exports = usersController;
| {
user.getUsersFromSet(set, req.uid, 0, 49, function(err, users) {
if (err) {
return next(err);
}
users = users.filter(function(user) {
return user && parseInt(user.uid, 10);
});
res.render('admin/manage/users', {
search_display: 'hidden',
loadmore_display: 'block',
users: users,
yourid: req.uid,
requireEmailConfirmation: parseInt(meta.config.requireEmailConfirmation, 10) === 1
});
});
} | identifier_body |
users.js | "use strict";
var user = require('../../user'),
meta = require('../../meta');
var usersController = {};
usersController.search = function(req, res, next) {
res.render('admin/manage/users', {
search_display: '',
loadmore_display: 'hide',
users: []
});
};
| };
usersController.sortByReputation = function(req, res, next) {
getUsers('users:reputation', req, res, next);
};
usersController.sortByJoinDate = function(req, res, next) {
getUsers('users:joindate', req, res, next);
};
usersController.banned = function(req, res, next) {
getUsers('users:banned', req, res, next);
};
function getUsers(set, req, res, next) {
user.getUsersFromSet(set, req.uid, 0, 49, function(err, users) {
if (err) {
return next(err);
}
users = users.filter(function(user) {
return user && parseInt(user.uid, 10);
});
res.render('admin/manage/users', {
search_display: 'hidden',
loadmore_display: 'block',
users: users,
yourid: req.uid,
requireEmailConfirmation: parseInt(meta.config.requireEmailConfirmation, 10) === 1
});
});
}
usersController.getCSV = function(req, res, next) {
user.getUsersCSV(function(err, data) {
if (err) {
return next(err);
}
res.attachment('users.csv');
res.setHeader('Content-Type', 'text/csv');
res.end(data);
});
};
module.exports = usersController; | usersController.sortByPosts = function(req, res, next) {
getUsers('users:postcount', req, res, next); | random_line_split |
users.js | "use strict";
var user = require('../../user'),
meta = require('../../meta');
var usersController = {};
usersController.search = function(req, res, next) {
res.render('admin/manage/users', {
search_display: '',
loadmore_display: 'hide',
users: []
});
};
usersController.sortByPosts = function(req, res, next) {
getUsers('users:postcount', req, res, next);
};
usersController.sortByReputation = function(req, res, next) {
getUsers('users:reputation', req, res, next);
};
usersController.sortByJoinDate = function(req, res, next) {
getUsers('users:joindate', req, res, next);
};
usersController.banned = function(req, res, next) {
getUsers('users:banned', req, res, next);
};
function | (set, req, res, next) {
user.getUsersFromSet(set, req.uid, 0, 49, function(err, users) {
if (err) {
return next(err);
}
users = users.filter(function(user) {
return user && parseInt(user.uid, 10);
});
res.render('admin/manage/users', {
search_display: 'hidden',
loadmore_display: 'block',
users: users,
yourid: req.uid,
requireEmailConfirmation: parseInt(meta.config.requireEmailConfirmation, 10) === 1
});
});
}
usersController.getCSV = function(req, res, next) {
user.getUsersCSV(function(err, data) {
if (err) {
return next(err);
}
res.attachment('users.csv');
res.setHeader('Content-Type', 'text/csv');
res.end(data);
});
};
module.exports = usersController;
| getUsers | identifier_name |
users.js | "use strict";
var user = require('../../user'),
meta = require('../../meta');
var usersController = {};
usersController.search = function(req, res, next) {
res.render('admin/manage/users', {
search_display: '',
loadmore_display: 'hide',
users: []
});
};
usersController.sortByPosts = function(req, res, next) {
getUsers('users:postcount', req, res, next);
};
usersController.sortByReputation = function(req, res, next) {
getUsers('users:reputation', req, res, next);
};
usersController.sortByJoinDate = function(req, res, next) {
getUsers('users:joindate', req, res, next);
};
usersController.banned = function(req, res, next) {
getUsers('users:banned', req, res, next);
};
function getUsers(set, req, res, next) {
user.getUsersFromSet(set, req.uid, 0, 49, function(err, users) {
if (err) |
users = users.filter(function(user) {
return user && parseInt(user.uid, 10);
});
res.render('admin/manage/users', {
search_display: 'hidden',
loadmore_display: 'block',
users: users,
yourid: req.uid,
requireEmailConfirmation: parseInt(meta.config.requireEmailConfirmation, 10) === 1
});
});
}
usersController.getCSV = function(req, res, next) {
user.getUsersCSV(function(err, data) {
if (err) {
return next(err);
}
res.attachment('users.csv');
res.setHeader('Content-Type', 'text/csv');
res.end(data);
});
};
module.exports = usersController;
| {
return next(err);
} | conditional_block |
data-gift-service.js | (function () {
'use strict';
angular
.module("myApp.presents")
.factory("dataGiftService", dataGiftService);
function dataGiftService($mdToast, $mdDialog) {
var dataGiftService = {
notification: notification,
pleaseWaitDialog: pleaseWaitDialog
};
function notification(status, msg, time) {
if (angular.isUndefined(time))
time = 3000;
$mdToast.show(
$mdToast.simple()
.textContent(msg)
.position("top right")
.hideDelay(time)
.theme(status)
);
}
function pleaseWaitDialog(msg, parentEl) {
if (angular.isUndefined(parentEl)) {
parentEl = angular.element(document.body);
}
| ' <md-dialog-content layout="column" layour-align="center center">' +
'<h1 class="text-center">Proszę czekać</h1>' +
'<md-content class="text-center">' + msg + '</md-content>'+
' </md-dialog-content>' +
'</md-dialog>'
});
}
return dataGiftService;
}
})(); | $mdDialog.show({
parent: parentEl,
template: '<md-dialog class="wait-dialog" aria-label="Please wait">' + | random_line_split |
data-gift-service.js | (function () {
'use strict';
angular
.module("myApp.presents")
.factory("dataGiftService", dataGiftService);
function dataGiftService($mdToast, $mdDialog) {
var dataGiftService = {
notification: notification,
pleaseWaitDialog: pleaseWaitDialog
};
function notification(status, msg, time) |
function pleaseWaitDialog(msg, parentEl) {
if (angular.isUndefined(parentEl)) {
parentEl = angular.element(document.body);
}
$mdDialog.show({
parent: parentEl,
template: '<md-dialog class="wait-dialog" aria-label="Please wait">' +
' <md-dialog-content layout="column" layour-align="center center">' +
'<h1 class="text-center">Proszę czekać</h1>' +
'<md-content class="text-center">' + msg + '</md-content>'+
' </md-dialog-content>' +
'</md-dialog>'
});
}
return dataGiftService;
}
})(); | {
if (angular.isUndefined(time))
time = 3000;
$mdToast.show(
$mdToast.simple()
.textContent(msg)
.position("top right")
.hideDelay(time)
.theme(status)
);
} | identifier_body |
data-gift-service.js | (function () {
'use strict';
angular
.module("myApp.presents")
.factory("dataGiftService", dataGiftService);
function dataGiftService($mdToast, $mdDialog) {
var dataGiftService = {
notification: notification,
pleaseWaitDialog: pleaseWaitDialog
};
function notification(status, msg, time) {
if (angular.isUndefined(time))
time = 3000;
$mdToast.show(
$mdToast.simple()
.textContent(msg)
.position("top right")
.hideDelay(time)
.theme(status)
);
}
function | (msg, parentEl) {
if (angular.isUndefined(parentEl)) {
parentEl = angular.element(document.body);
}
$mdDialog.show({
parent: parentEl,
template: '<md-dialog class="wait-dialog" aria-label="Please wait">' +
' <md-dialog-content layout="column" layour-align="center center">' +
'<h1 class="text-center">Proszę czekać</h1>' +
'<md-content class="text-center">' + msg + '</md-content>'+
' </md-dialog-content>' +
'</md-dialog>'
});
}
return dataGiftService;
}
})(); | pleaseWaitDialog | identifier_name |
robot.rs | use wpilib::wpilib_hal::*;
/// The base class from which all robots should be derived.
///
/// # Usage
///
/// ```
/// struct TestRobot {};
///
/// impl Robot for TestRobot {
/// fn new() -> TestRobot {
/// TestRobot{}
/// }
///
/// fn run(self) {
/// // Do something...
/// }
/// }
///
/// fn main() {
/// TestRobot::main();
/// }
/// ```
pub trait Robot: Sized {
/// Run the robot class. This will be called once, at the beginning of the program, after
/// initialization.
fn run(self);
| fn main() {
// Initialize HAL
unsafe {
let status = HAL_Initialize(0);
if status != 1 {
panic!("WPILib HAL failed to initialize!");
}
}
let robot = Self::new();
robot.run();
}
} | /// Create an instance of the robot class.
fn new() -> Self;
/// Run the robot statically. | random_line_split |
robot.rs | use wpilib::wpilib_hal::*;
/// The base class from which all robots should be derived.
///
/// # Usage
///
/// ```
/// struct TestRobot {};
///
/// impl Robot for TestRobot {
/// fn new() -> TestRobot {
/// TestRobot{}
/// }
///
/// fn run(self) {
/// // Do something...
/// }
/// }
///
/// fn main() {
/// TestRobot::main();
/// }
/// ```
pub trait Robot: Sized {
/// Run the robot class. This will be called once, at the beginning of the program, after
/// initialization.
fn run(self);
/// Create an instance of the robot class.
fn new() -> Self;
/// Run the robot statically.
fn | () {
// Initialize HAL
unsafe {
let status = HAL_Initialize(0);
if status != 1 {
panic!("WPILib HAL failed to initialize!");
}
}
let robot = Self::new();
robot.run();
}
}
| main | identifier_name |
robot.rs | use wpilib::wpilib_hal::*;
/// The base class from which all robots should be derived.
///
/// # Usage
///
/// ```
/// struct TestRobot {};
///
/// impl Robot for TestRobot {
/// fn new() -> TestRobot {
/// TestRobot{}
/// }
///
/// fn run(self) {
/// // Do something...
/// }
/// }
///
/// fn main() {
/// TestRobot::main();
/// }
/// ```
pub trait Robot: Sized {
/// Run the robot class. This will be called once, at the beginning of the program, after
/// initialization.
fn run(self);
/// Create an instance of the robot class.
fn new() -> Self;
/// Run the robot statically.
fn main() {
// Initialize HAL
unsafe {
let status = HAL_Initialize(0);
if status != 1 |
}
let robot = Self::new();
robot.run();
}
}
| {
panic!("WPILib HAL failed to initialize!");
} | conditional_block |
robot.rs | use wpilib::wpilib_hal::*;
/// The base class from which all robots should be derived.
///
/// # Usage
///
/// ```
/// struct TestRobot {};
///
/// impl Robot for TestRobot {
/// fn new() -> TestRobot {
/// TestRobot{}
/// }
///
/// fn run(self) {
/// // Do something...
/// }
/// }
///
/// fn main() {
/// TestRobot::main();
/// }
/// ```
pub trait Robot: Sized {
/// Run the robot class. This will be called once, at the beginning of the program, after
/// initialization.
fn run(self);
/// Create an instance of the robot class.
fn new() -> Self;
/// Run the robot statically.
fn main() |
}
| {
// Initialize HAL
unsafe {
let status = HAL_Initialize(0);
if status != 1 {
panic!("WPILib HAL failed to initialize!");
}
}
let robot = Self::new();
robot.run();
} | identifier_body |
130-typed-array.js | const { assert, skip, test, module: describe } = require('qunit');
const { GPU } = require('../../src');
describe('issue #130');
function typedArrays(mode) {
const gpu = new GPU({ mode });
const kernel = gpu.createKernel(function(changes) {
return changes[this.thread.y][this.thread.x];
})
.setOutput([2, 1]);
const values = [new Float32Array(2)];
values[0][0] = 0;
values[0][1] = 0;
const result = kernel(values);
assert.equal(result[0][0], 0);
assert.equal(result[0][1], 0);
gpu.destroy();
}
test("Issue #130 - typed array auto", () => {
typedArrays(null);
});
test("Issue #130 - typed array gpu", () => {
typedArrays('gpu');
}); |
(GPU.isWebGLSupported ? test : skip)("Issue #130 - typed array webgl", () => {
typedArrays('webgl');
});
(GPU.isWebGL2Supported ? test : skip)("Issue #130 - typed array webgl2", () => {
typedArrays('webgl2');
});
(GPU.isHeadlessGLSupported ? test : skip)("Issue #130 - typed array headlessgl", () => {
typedArrays('headlessgl');
});
test("Issue #130 - typed array cpu", () => {
typedArrays('cpu');
}); | random_line_split | |
130-typed-array.js | const { assert, skip, test, module: describe } = require('qunit');
const { GPU } = require('../../src');
describe('issue #130');
function | (mode) {
const gpu = new GPU({ mode });
const kernel = gpu.createKernel(function(changes) {
return changes[this.thread.y][this.thread.x];
})
.setOutput([2, 1]);
const values = [new Float32Array(2)];
values[0][0] = 0;
values[0][1] = 0;
const result = kernel(values);
assert.equal(result[0][0], 0);
assert.equal(result[0][1], 0);
gpu.destroy();
}
test("Issue #130 - typed array auto", () => {
typedArrays(null);
});
test("Issue #130 - typed array gpu", () => {
typedArrays('gpu');
});
(GPU.isWebGLSupported ? test : skip)("Issue #130 - typed array webgl", () => {
typedArrays('webgl');
});
(GPU.isWebGL2Supported ? test : skip)("Issue #130 - typed array webgl2", () => {
typedArrays('webgl2');
});
(GPU.isHeadlessGLSupported ? test : skip)("Issue #130 - typed array headlessgl", () => {
typedArrays('headlessgl');
});
test("Issue #130 - typed array cpu", () => {
typedArrays('cpu');
});
| typedArrays | identifier_name |
130-typed-array.js | const { assert, skip, test, module: describe } = require('qunit');
const { GPU } = require('../../src');
describe('issue #130');
function typedArrays(mode) |
test("Issue #130 - typed array auto", () => {
typedArrays(null);
});
test("Issue #130 - typed array gpu", () => {
typedArrays('gpu');
});
(GPU.isWebGLSupported ? test : skip)("Issue #130 - typed array webgl", () => {
typedArrays('webgl');
});
(GPU.isWebGL2Supported ? test : skip)("Issue #130 - typed array webgl2", () => {
typedArrays('webgl2');
});
(GPU.isHeadlessGLSupported ? test : skip)("Issue #130 - typed array headlessgl", () => {
typedArrays('headlessgl');
});
test("Issue #130 - typed array cpu", () => {
typedArrays('cpu');
});
| {
const gpu = new GPU({ mode });
const kernel = gpu.createKernel(function(changes) {
return changes[this.thread.y][this.thread.x];
})
.setOutput([2, 1]);
const values = [new Float32Array(2)];
values[0][0] = 0;
values[0][1] = 0;
const result = kernel(values);
assert.equal(result[0][0], 0);
assert.equal(result[0][1], 0);
gpu.destroy();
} | identifier_body |
__init__.py | """
Support for MQTT vacuums.
For more details about this platform, please refer to the documentation at
https://www.home-assistant.io/components/vacuum.mqtt/
"""
import logging
import voluptuous as vol
from homeassistant.components.vacuum import DOMAIN
from homeassistant.components.mqtt import ATTR_DISCOVERY_HASH
from homeassistant.components.mqtt.discovery import (
MQTT_DISCOVERY_NEW,
clear_discovery_hash,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .schema import CONF_SCHEMA, LEGACY, STATE, MQTT_VACUUM_SCHEMA
from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy
from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state
_LOGGER = logging.getLogger(__name__)
def validate_mqtt_vacuum(value):
"""Validate MQTT vacuum schema."""
schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE}
return schemas[value[CONF_SCHEMA]](value)
PLATFORM_SCHEMA = vol.All(
MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT vacuum through configuration.yaml."""
await _async_setup_entity(config, async_add_entities, discovery_info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT vacuum dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add a MQTT vacuum."""
try:
discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash
)
except Exception:
if discovery_hash:
|
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash=None
):
"""Set up the MQTT vacuum."""
setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state}
await setup_entity[config[CONF_SCHEMA]](
config, async_add_entities, config_entry, discovery_hash
)
| clear_discovery_hash(hass, discovery_hash) | conditional_block |
__init__.py | """
Support for MQTT vacuums.
For more details about this platform, please refer to the documentation at
https://www.home-assistant.io/components/vacuum.mqtt/
"""
import logging
import voluptuous as vol
from homeassistant.components.vacuum import DOMAIN
from homeassistant.components.mqtt import ATTR_DISCOVERY_HASH
from homeassistant.components.mqtt.discovery import (
MQTT_DISCOVERY_NEW,
clear_discovery_hash,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .schema import CONF_SCHEMA, LEGACY, STATE, MQTT_VACUUM_SCHEMA
from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy
from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state
_LOGGER = logging.getLogger(__name__)
def validate_mqtt_vacuum(value):
"""Validate MQTT vacuum schema."""
schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE}
return schemas[value[CONF_SCHEMA]](value)
PLATFORM_SCHEMA = vol.All(
MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT vacuum through configuration.yaml."""
await _async_setup_entity(config, async_add_entities, discovery_info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT vacuum dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
|
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash=None
):
"""Set up the MQTT vacuum."""
setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state}
await setup_entity[config[CONF_SCHEMA]](
config, async_add_entities, config_entry, discovery_hash
)
| """Discover and add a MQTT vacuum."""
try:
discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash
)
except Exception:
if discovery_hash:
clear_discovery_hash(hass, discovery_hash)
raise | identifier_body |
__init__.py | """
Support for MQTT vacuums.
For more details about this platform, please refer to the documentation at
https://www.home-assistant.io/components/vacuum.mqtt/
"""
import logging
import voluptuous as vol
from homeassistant.components.vacuum import DOMAIN
from homeassistant.components.mqtt import ATTR_DISCOVERY_HASH
from homeassistant.components.mqtt.discovery import (
MQTT_DISCOVERY_NEW,
clear_discovery_hash,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .schema import CONF_SCHEMA, LEGACY, STATE, MQTT_VACUUM_SCHEMA
from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy
from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state
_LOGGER = logging.getLogger(__name__)
def validate_mqtt_vacuum(value):
"""Validate MQTT vacuum schema."""
schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE}
return schemas[value[CONF_SCHEMA]](value)
PLATFORM_SCHEMA = vol.All(
MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT vacuum through configuration.yaml."""
await _async_setup_entity(config, async_add_entities, discovery_info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT vacuum dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add a MQTT vacuum."""
try:
discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash
)
except Exception:
if discovery_hash:
clear_discovery_hash(hass, discovery_hash)
raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(DOMAIN, "mqtt"), async_discover
)
async def | (
config, async_add_entities, config_entry, discovery_hash=None
):
"""Set up the MQTT vacuum."""
setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state}
await setup_entity[config[CONF_SCHEMA]](
config, async_add_entities, config_entry, discovery_hash
)
| _async_setup_entity | identifier_name |
__init__.py | """
Support for MQTT vacuums.
For more details about this platform, please refer to the documentation at
https://www.home-assistant.io/components/vacuum.mqtt/
"""
import logging
import voluptuous as vol
from homeassistant.components.vacuum import DOMAIN
from homeassistant.components.mqtt import ATTR_DISCOVERY_HASH
from homeassistant.components.mqtt.discovery import (
MQTT_DISCOVERY_NEW,
clear_discovery_hash,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .schema import CONF_SCHEMA, LEGACY, STATE, MQTT_VACUUM_SCHEMA
from .schema_legacy import PLATFORM_SCHEMA_LEGACY, async_setup_entity_legacy
from .schema_state import PLATFORM_SCHEMA_STATE, async_setup_entity_state
_LOGGER = logging.getLogger(__name__)
def validate_mqtt_vacuum(value):
"""Validate MQTT vacuum schema."""
schemas = {LEGACY: PLATFORM_SCHEMA_LEGACY, STATE: PLATFORM_SCHEMA_STATE}
return schemas[value[CONF_SCHEMA]](value)
PLATFORM_SCHEMA = vol.All(
MQTT_VACUUM_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), validate_mqtt_vacuum
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up MQTT vacuum through configuration.yaml."""
await _async_setup_entity(config, async_add_entities, discovery_info)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up MQTT vacuum dynamically through MQTT discovery."""
async def async_discover(discovery_payload):
"""Discover and add a MQTT vacuum."""
try:
discovery_hash = discovery_payload.pop(ATTR_DISCOVERY_HASH)
config = PLATFORM_SCHEMA(discovery_payload)
await _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash
)
except Exception:
if discovery_hash: | raise
async_dispatcher_connect(
hass, MQTT_DISCOVERY_NEW.format(DOMAIN, "mqtt"), async_discover
)
async def _async_setup_entity(
config, async_add_entities, config_entry, discovery_hash=None
):
"""Set up the MQTT vacuum."""
setup_entity = {LEGACY: async_setup_entity_legacy, STATE: async_setup_entity_state}
await setup_entity[config[CONF_SCHEMA]](
config, async_add_entities, config_entry, discovery_hash
) | clear_discovery_hash(hass, discovery_hash) | random_line_split |
flat.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Flat trace module
use std::collections::VecDeque;
use rlp::*;
use util::HeapSizeOf;
use basic_types::LogBloom;
use super::trace::{Action, Res};
/// Trace localized in vector of traces produced by a single transaction.
///
/// Parent and children indexes refer to positions in this vector.
#[derive(Debug, PartialEq, Clone, Binary)]
pub struct FlatTrace {
/// Type of action performed by a transaction.
pub action: Action,
/// Result of this action.
pub result: Res,
/// Number of subtraces.
pub subtraces: usize,
/// Exact location of trace.
///
/// [index in root, index in first CALL, index in second CALL, ...]
pub trace_address: VecDeque<usize>,
}
impl FlatTrace {
/// Returns bloom of the trace.
pub fn | (&self) -> LogBloom {
self.action.bloom() | self.result.bloom()
}
}
impl HeapSizeOf for FlatTrace {
fn heap_size_of_children(&self) -> usize {
self.trace_address.heap_size_of_children()
}
}
impl Encodable for FlatTrace {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.action);
s.append(&self.result);
s.append(&self.subtraces);
s.append(&self.trace_address.clone().into_iter().collect::<Vec<_>>());
}
}
impl Decodable for FlatTrace {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let d = decoder.as_rlp();
let v: Vec<usize> = try!(d.val_at(3));
let res = FlatTrace {
action: try!(d.val_at(0)),
result: try!(d.val_at(1)),
subtraces: try!(d.val_at(2)),
trace_address: v.into_iter().collect(),
};
Ok(res)
}
}
/// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone)]
pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
fn from(v: Vec<FlatTrace>) -> Self {
FlatTransactionTraces(v)
}
}
impl HeapSizeOf for FlatTransactionTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl FlatTransactionTraces {
/// Returns bloom of all traces in the collection.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, trace | bloom | trace.bloom())
}
}
impl Encodable for FlatTransactionTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatTransactionTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatTransactionTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
fn into(self) -> Vec<FlatTrace> {
self.0
}
}
/// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn from(v: Vec<FlatTransactionTraces>) -> Self {
FlatBlockTraces(v)
}
}
impl FlatBlockTraces {
/// Returns bloom of all traces in the block.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, tx_traces | bloom | tx_traces.bloom())
}
}
impl Encodable for FlatBlockTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatBlockTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatBlockTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn into(self) -> Vec<FlatTransactionTraces> {
self.0
}
}
#[cfg(test)]
mod tests {
use super::{FlatBlockTraces, FlatTransactionTraces, FlatTrace};
use trace::trace::{Action, Res, CallResult, Call, Suicide};
use types::executed::CallType;
#[test]
fn test_trace_serialization() {
// block #51921
let flat_trace = FlatTrace {
action: Action::Call(Call {
from: "8dda5e016e674683241bf671cced51e7239ea2bc".parse().unwrap(),
to: "37a5e19cc2d49f244805d5c268c0e6f321965ab9".parse().unwrap(),
value: "3627e8f712373c0000".parse().unwrap(),
gas: 0x03e8.into(),
input: vec![],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 0,
};
let flat_trace1 = FlatTrace {
action: Action::Call(Call {
from: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(),
to: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
value: 0.into(),
gas: 0x010c78.into(),
input: vec![0x41, 0xc0, 0xe1, 0xb5],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0x0127.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 1,
};
let flat_trace2 = FlatTrace {
action: Action::Suicide(Suicide {
address: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
balance: 0.into(),
refund_address: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(),
}),
result: Res::None,
trace_address: vec![0].into_iter().collect(),
subtraces: 0,
};
let block_traces = FlatBlockTraces(vec![
FlatTransactionTraces(vec![flat_trace]),
FlatTransactionTraces(vec![flat_trace1, flat_trace2])
]);
let encoded = ::rlp::encode(&block_traces);
let decoded = ::rlp::decode(&encoded);
assert_eq!(block_traces, decoded);
}
}
| bloom | identifier_name |
flat.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Flat trace module
use std::collections::VecDeque;
use rlp::*;
use util::HeapSizeOf;
use basic_types::LogBloom;
use super::trace::{Action, Res};
/// Trace localized in vector of traces produced by a single transaction.
///
/// Parent and children indexes refer to positions in this vector.
#[derive(Debug, PartialEq, Clone, Binary)]
pub struct FlatTrace {
/// Type of action performed by a transaction.
pub action: Action,
/// Result of this action.
pub result: Res,
/// Number of subtraces.
pub subtraces: usize,
/// Exact location of trace.
///
/// [index in root, index in first CALL, index in second CALL, ...]
pub trace_address: VecDeque<usize>,
}
impl FlatTrace {
/// Returns bloom of the trace.
pub fn bloom(&self) -> LogBloom {
self.action.bloom() | self.result.bloom()
}
}
impl HeapSizeOf for FlatTrace {
fn heap_size_of_children(&self) -> usize {
self.trace_address.heap_size_of_children()
}
}
impl Encodable for FlatTrace {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.action);
s.append(&self.result);
s.append(&self.subtraces);
s.append(&self.trace_address.clone().into_iter().collect::<Vec<_>>());
}
}
impl Decodable for FlatTrace {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let d = decoder.as_rlp();
let v: Vec<usize> = try!(d.val_at(3));
let res = FlatTrace {
action: try!(d.val_at(0)),
result: try!(d.val_at(1)),
subtraces: try!(d.val_at(2)),
trace_address: v.into_iter().collect(),
};
Ok(res)
}
}
/// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone)]
pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
fn from(v: Vec<FlatTrace>) -> Self {
FlatTransactionTraces(v)
}
}
impl HeapSizeOf for FlatTransactionTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl FlatTransactionTraces {
/// Returns bloom of all traces in the collection.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, trace | bloom | trace.bloom())
}
}
impl Encodable for FlatTransactionTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatTransactionTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatTransactionTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
fn into(self) -> Vec<FlatTrace> {
self.0
}
}
/// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn from(v: Vec<FlatTransactionTraces>) -> Self {
FlatBlockTraces(v)
}
}
impl FlatBlockTraces {
/// Returns bloom of all traces in the block.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, tx_traces | bloom | tx_traces.bloom())
}
}
impl Encodable for FlatBlockTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatBlockTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatBlockTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn into(self) -> Vec<FlatTransactionTraces> {
self.0
}
}
#[cfg(test)]
mod tests {
use super::{FlatBlockTraces, FlatTransactionTraces, FlatTrace};
use trace::trace::{Action, Res, CallResult, Call, Suicide};
use types::executed::CallType;
#[test]
fn test_trace_serialization() {
// block #51921
let flat_trace = FlatTrace {
action: Action::Call(Call {
from: "8dda5e016e674683241bf671cced51e7239ea2bc".parse().unwrap(),
to: "37a5e19cc2d49f244805d5c268c0e6f321965ab9".parse().unwrap(),
value: "3627e8f712373c0000".parse().unwrap(),
gas: 0x03e8.into(),
input: vec![],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 0,
}; | to: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
value: 0.into(),
gas: 0x010c78.into(),
input: vec![0x41, 0xc0, 0xe1, 0xb5],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0x0127.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 1,
};
let flat_trace2 = FlatTrace {
action: Action::Suicide(Suicide {
address: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
balance: 0.into(),
refund_address: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(),
}),
result: Res::None,
trace_address: vec![0].into_iter().collect(),
subtraces: 0,
};
let block_traces = FlatBlockTraces(vec![
FlatTransactionTraces(vec![flat_trace]),
FlatTransactionTraces(vec![flat_trace1, flat_trace2])
]);
let encoded = ::rlp::encode(&block_traces);
let decoded = ::rlp::decode(&encoded);
assert_eq!(block_traces, decoded);
}
} |
let flat_trace1 = FlatTrace {
action: Action::Call(Call {
from: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(), | random_line_split |
flat.rs | // Copyright 2015, 2016 Ethcore (UK) Ltd.
// This file is part of Parity.
// Parity is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
// Parity is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with Parity. If not, see <http://www.gnu.org/licenses/>.
//! Flat trace module
use std::collections::VecDeque;
use rlp::*;
use util::HeapSizeOf;
use basic_types::LogBloom;
use super::trace::{Action, Res};
/// Trace localized in vector of traces produced by a single transaction.
///
/// Parent and children indexes refer to positions in this vector.
#[derive(Debug, PartialEq, Clone, Binary)]
pub struct FlatTrace {
/// Type of action performed by a transaction.
pub action: Action,
/// Result of this action.
pub result: Res,
/// Number of subtraces.
pub subtraces: usize,
/// Exact location of trace.
///
/// [index in root, index in first CALL, index in second CALL, ...]
pub trace_address: VecDeque<usize>,
}
impl FlatTrace {
/// Returns bloom of the trace.
pub fn bloom(&self) -> LogBloom {
self.action.bloom() | self.result.bloom()
}
}
impl HeapSizeOf for FlatTrace {
fn heap_size_of_children(&self) -> usize {
self.trace_address.heap_size_of_children()
}
}
impl Encodable for FlatTrace {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(4);
s.append(&self.action);
s.append(&self.result);
s.append(&self.subtraces);
s.append(&self.trace_address.clone().into_iter().collect::<Vec<_>>());
}
}
impl Decodable for FlatTrace {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
let d = decoder.as_rlp();
let v: Vec<usize> = try!(d.val_at(3));
let res = FlatTrace {
action: try!(d.val_at(0)),
result: try!(d.val_at(1)),
subtraces: try!(d.val_at(2)),
trace_address: v.into_iter().collect(),
};
Ok(res)
}
}
/// Represents all traces produced by a single transaction.
#[derive(Debug, PartialEq, Clone)]
pub struct FlatTransactionTraces(Vec<FlatTrace>);
impl From<Vec<FlatTrace>> for FlatTransactionTraces {
fn from(v: Vec<FlatTrace>) -> Self |
}
impl HeapSizeOf for FlatTransactionTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl FlatTransactionTraces {
/// Returns bloom of all traces in the collection.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, trace | bloom | trace.bloom())
}
}
impl Encodable for FlatTransactionTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatTransactionTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatTransactionTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTrace>> for FlatTransactionTraces {
fn into(self) -> Vec<FlatTrace> {
self.0
}
}
/// Represents all traces produced by transactions in a single block.
#[derive(Debug, PartialEq, Clone, Default)]
pub struct FlatBlockTraces(Vec<FlatTransactionTraces>);
impl HeapSizeOf for FlatBlockTraces {
fn heap_size_of_children(&self) -> usize {
self.0.heap_size_of_children()
}
}
impl From<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn from(v: Vec<FlatTransactionTraces>) -> Self {
FlatBlockTraces(v)
}
}
impl FlatBlockTraces {
/// Returns bloom of all traces in the block.
pub fn bloom(&self) -> LogBloom {
self.0.iter().fold(Default::default(), | bloom, tx_traces | bloom | tx_traces.bloom())
}
}
impl Encodable for FlatBlockTraces {
fn rlp_append(&self, s: &mut RlpStream) {
s.append(&self.0);
}
}
impl Decodable for FlatBlockTraces {
fn decode<D>(decoder: &D) -> Result<Self, DecoderError> where D: Decoder {
Ok(FlatBlockTraces(try!(Decodable::decode(decoder))))
}
}
impl Into<Vec<FlatTransactionTraces>> for FlatBlockTraces {
fn into(self) -> Vec<FlatTransactionTraces> {
self.0
}
}
#[cfg(test)]
mod tests {
use super::{FlatBlockTraces, FlatTransactionTraces, FlatTrace};
use trace::trace::{Action, Res, CallResult, Call, Suicide};
use types::executed::CallType;
#[test]
fn test_trace_serialization() {
// block #51921
let flat_trace = FlatTrace {
action: Action::Call(Call {
from: "8dda5e016e674683241bf671cced51e7239ea2bc".parse().unwrap(),
to: "37a5e19cc2d49f244805d5c268c0e6f321965ab9".parse().unwrap(),
value: "3627e8f712373c0000".parse().unwrap(),
gas: 0x03e8.into(),
input: vec![],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 0,
};
let flat_trace1 = FlatTrace {
action: Action::Call(Call {
from: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(),
to: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
value: 0.into(),
gas: 0x010c78.into(),
input: vec![0x41, 0xc0, 0xe1, 0xb5],
call_type: CallType::Call,
}),
result: Res::Call(CallResult {
gas_used: 0x0127.into(),
output: vec![],
}),
trace_address: Default::default(),
subtraces: 1,
};
let flat_trace2 = FlatTrace {
action: Action::Suicide(Suicide {
address: "412fda7643b37d436cb40628f6dbbb80a07267ed".parse().unwrap(),
balance: 0.into(),
refund_address: "3d0768da09ce77d25e2d998e6a7b6ed4b9116c2d".parse().unwrap(),
}),
result: Res::None,
trace_address: vec![0].into_iter().collect(),
subtraces: 0,
};
let block_traces = FlatBlockTraces(vec![
FlatTransactionTraces(vec![flat_trace]),
FlatTransactionTraces(vec![flat_trace1, flat_trace2])
]);
let encoded = ::rlp::encode(&block_traces);
let decoded = ::rlp::decode(&encoded);
assert_eq!(block_traces, decoded);
}
}
| {
FlatTransactionTraces(v)
} | identifier_body |
ModularFocalNetwork.py | """
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def range_from_base(base, size):
return xrange(base, base + size)
class ModularFocalNetwork(object):
def __init__(self, C, dim, focal_width):
"""
Generates connectivity matrix for a modular network with...
C -- # communities/modules
dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
focal_width -- how connections per node in target layer
Each community will have an even number of nodes, where each node has focal_width
connections from randomly chosen nodes in the input layer. |
CIJ[i,j] represents the connection from node j in input layer to node i in this layer.
"""
self.C = C
self.dim = dim
self.module_dim = [layer_size / C for layer_size in dim]
self.focal_width = focal_width
self.CIJ = np.zeros(dim)
for i in range(C):
self.init_module(i)
def init_module(self, module_index):
"""
Initialises the target module with connections from the input layer.
"""
target_dim, input_dim = self.module_dim
input_nodes = range_from_base(module_index * input_dim, input_dim)
target_nodes = range_from_base(module_index * target_dim, target_dim)
for i in target_nodes:
nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
self.CIJ[i, nodes_to_connect] = 1
def plot(self):
"""
Uses pyplot to draw a plot of the connectivity matrix
"""
plot_connectivity_matrix(self.CIJ, self.dim).show() | random_line_split | |
ModularFocalNetwork.py | """
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def | (base, size):
return xrange(base, base + size)
class ModularFocalNetwork(object):
def __init__(self, C, dim, focal_width):
"""
Generates connectivity matrix for a modular network with...
C -- # communities/modules
dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
focal_width -- how connections per node in target layer
Each community will have an even number of nodes, where each node has focal_width
connections from randomly chosen nodes in the input layer.
CIJ[i,j] represents the connection from node j in input layer to node i in this layer.
"""
self.C = C
self.dim = dim
self.module_dim = [layer_size / C for layer_size in dim]
self.focal_width = focal_width
self.CIJ = np.zeros(dim)
for i in range(C):
self.init_module(i)
def init_module(self, module_index):
"""
Initialises the target module with connections from the input layer.
"""
target_dim, input_dim = self.module_dim
input_nodes = range_from_base(module_index * input_dim, input_dim)
target_nodes = range_from_base(module_index * target_dim, target_dim)
for i in target_nodes:
nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
self.CIJ[i, nodes_to_connect] = 1
def plot(self):
"""
Uses pyplot to draw a plot of the connectivity matrix
"""
plot_connectivity_matrix(self.CIJ, self.dim).show()
| range_from_base | identifier_name |
ModularFocalNetwork.py | """
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def range_from_base(base, size):
return xrange(base, base + size)
class ModularFocalNetwork(object):
def __init__(self, C, dim, focal_width):
"""
Generates connectivity matrix for a modular network with...
C -- # communities/modules
dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
focal_width -- how connections per node in target layer
Each community will have an even number of nodes, where each node has focal_width
connections from randomly chosen nodes in the input layer.
CIJ[i,j] represents the connection from node j in input layer to node i in this layer.
"""
self.C = C
self.dim = dim
self.module_dim = [layer_size / C for layer_size in dim]
self.focal_width = focal_width
self.CIJ = np.zeros(dim)
for i in range(C):
self.init_module(i)
def init_module(self, module_index):
"""
Initialises the target module with connections from the input layer.
"""
target_dim, input_dim = self.module_dim
input_nodes = range_from_base(module_index * input_dim, input_dim)
target_nodes = range_from_base(module_index * target_dim, target_dim)
for i in target_nodes:
|
def plot(self):
"""
Uses pyplot to draw a plot of the connectivity matrix
"""
plot_connectivity_matrix(self.CIJ, self.dim).show()
| nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
self.CIJ[i, nodes_to_connect] = 1 | conditional_block |
ModularFocalNetwork.py | """
Examples
========
ModularFocalNetwork(8, [1600, 800], 4).plot() => 8 modules, 4 connections to each neuron
"""
import numpy as np
from Plotters import plot_connectivity_matrix
def range_from_base(base, size):
|
class ModularFocalNetwork(object):
def __init__(self, C, dim, focal_width):
"""
Generates connectivity matrix for a modular network with...
C -- # communities/modules
dim -- dimensions of matrix, [nodes_in_target_layer, nodes_in_input_layer]
focal_width -- how connections per node in target layer
Each community will have an even number of nodes, where each node has focal_width
connections from randomly chosen nodes in the input layer.
CIJ[i,j] represents the connection from node j in input layer to node i in this layer.
"""
self.C = C
self.dim = dim
self.module_dim = [layer_size / C for layer_size in dim]
self.focal_width = focal_width
self.CIJ = np.zeros(dim)
for i in range(C):
self.init_module(i)
def init_module(self, module_index):
"""
Initialises the target module with connections from the input layer.
"""
target_dim, input_dim = self.module_dim
input_nodes = range_from_base(module_index * input_dim, input_dim)
target_nodes = range_from_base(module_index * target_dim, target_dim)
for i in target_nodes:
nodes_to_connect = np.random.choice(input_nodes, self.focal_width, replace=False)
self.CIJ[i, nodes_to_connect] = 1
def plot(self):
"""
Uses pyplot to draw a plot of the connectivity matrix
"""
plot_connectivity_matrix(self.CIJ, self.dim).show()
| return xrange(base, base + size) | identifier_body |
api.py | from avatar.templatetags.avatar_tags import avatar_url
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from tastypie import fields
from tastypie.resources import ModelResource
from accounts.models import UserProfile
from main.api.authentication import UberAuthentication
from main.api.serializers import UberSerializer
class UserResource(ModelResource):
#profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True)
class Meta:
|
def dehydrate(self, bundle):
bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username})
bundle.data['best_name'] = bundle.obj.profile.get_best_name()
bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID)
return bundle
class UserProfileResource(ModelResource):
class Meta:
queryset = UserProfile.objects.all()
authentication = UberAuthentication()
resource_name = 'profiles'
| queryset = User.objects.all()
authentication = UberAuthentication()
#authorization = CourseAuthorization()
resource_name = 'users'
fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
allowed_methods = ['get']
include_absolute_url = True
serializer = UberSerializer() | identifier_body |
api.py | from avatar.templatetags.avatar_tags import avatar_url
from django.conf import settings
from django.contrib.auth.models import User | from django.core.urlresolvers import reverse
from tastypie import fields
from tastypie.resources import ModelResource
from accounts.models import UserProfile
from main.api.authentication import UberAuthentication
from main.api.serializers import UberSerializer
class UserResource(ModelResource):
#profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True)
class Meta:
queryset = User.objects.all()
authentication = UberAuthentication()
#authorization = CourseAuthorization()
resource_name = 'users'
fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
allowed_methods = ['get']
include_absolute_url = True
serializer = UberSerializer()
def dehydrate(self, bundle):
bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username})
bundle.data['best_name'] = bundle.obj.profile.get_best_name()
bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID)
return bundle
class UserProfileResource(ModelResource):
class Meta:
queryset = UserProfile.objects.all()
authentication = UberAuthentication()
resource_name = 'profiles' | random_line_split | |
api.py | from avatar.templatetags.avatar_tags import avatar_url
from django.conf import settings
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from tastypie import fields
from tastypie.resources import ModelResource
from accounts.models import UserProfile
from main.api.authentication import UberAuthentication
from main.api.serializers import UberSerializer
class UserResource(ModelResource):
#profile = fields.ForeignKey('accounts.api.UserProfileResource', 'profile', full=True)
class Meta:
queryset = User.objects.all()
authentication = UberAuthentication()
#authorization = CourseAuthorization()
resource_name = 'users'
fields = ['username', 'first_name', 'last_name', 'last_login', 'profile']
allowed_methods = ['get']
include_absolute_url = True
serializer = UberSerializer()
def | (self, bundle):
bundle.data['absolute_url'] = reverse('account_user_profile_with_username', kwargs={'username': bundle.obj.username})
bundle.data['best_name'] = bundle.obj.profile.get_best_name()
bundle.data['tiny_thumbnail'] = avatar_url(bundle.obj, size=settings.AVATAR_SIZE_IN_ENROLLMENTS_GRID)
return bundle
class UserProfileResource(ModelResource):
class Meta:
queryset = UserProfile.objects.all()
authentication = UberAuthentication()
resource_name = 'profiles'
| dehydrate | identifier_name |
text-editor-registry.d.ts | import { Disposable, TextEditor } from '../index';
/** Experimental: This global registry tracks registered TextEditors. */
export interface TextEditorRegistry {
// Managing Text Editors
/** Remove all editors from the registry. */
clear(): void;
/** Register a TextEditor. */
add(editor: TextEditor): Disposable;
/** Remove the given TextEditor from the registry. */
remove(editor: TextEditor): boolean;
/** Keep a TextEditor's configuration in sync with Atom's settings. */
maintainConfig(editor: TextEditor): Disposable;
/**
* Set a TextEditor's grammar based on its path and content, and continue
* to update its grammar as gramamrs are added or updated, or the editor's
* file path changes.
*/
maintainGrammar(editor: TextEditor): Disposable;
/**
* Force a TextEditor to use a different grammar than the one that would
* otherwise be selected for it. | */
setGrammarOverride(editor: TextEditor, scopeName: string): void;
/**
* Retrieve the grammar scope name that has been set as a grammar override
* for the given TextEditor.
*/
getGrammarOverride(editor: TextEditor): string | null;
/** Remove any grammar override that has been set for the given TextEditor. */
clearGrammarOverride(editor: TextEditor): void;
// Event Subscription
/** Invoke the given callback with all the current and future registered TextEditors. */
observe(callback: (editor: TextEditor) => void): Disposable;
} | random_line_split |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.