text stringlengths 0 1.05M | meta dict |
|---|---|
"""Assorted helper methods"""
from collections import Iterable
from .small_classes import Strings, Quantity
def isequal(a, b):
    """Recursively compare two objects, element-by-element for iterables.

    Strings, tuples, lists, and dicts are compared directly; other
    iterables are walked in parallel (``b`` must support indexing).
    """
    walkable = (isinstance(a, Iterable)
                and not isinstance(a, Strings + (tuple, list, dict)))
    if not walkable:
        return a == b
    for idx, item in enumerate(a):
        if not isequal(item, b[idx]):
            return False
    return True
def mag(c):
    "Return magnitude of a Number or Quantity"
    return c.magnitude if isinstance(c, Quantity) else c
def unitstr(units, into="%s", options="~", dimless='-'):
    """Return a string representation of ``units``.

    Args:
        units: a units-like object, an object with a ``.descr`` dict
            (whose "units" entry is used), or a plain string.
        into: format string the unit text is interpolated into.
        options: format-spec options applied to the units object.
        dimless: placeholder substituted for "dimensionless".

    Returns:
        ``into % unitstring``, or "" when there are no units.
    """
    if hasattr(units, "descr"):
        if isinstance(units.descr, dict):
            units = units.descr.get("units", dimless)
    if units and not isinstance(units, Strings):
        try:
            rawstr = ("{:%s}" % options).format(units)
        except Exception:  # was a bare except; never swallow SystemExit etc.
            rawstr = "1.0 " + str(units.units)
        # Drop the leading magnitude token, keep only the unit text.
        units = "".join(rawstr.replace("dimensionless", dimless).split()[1:])
    if units:
        return into % units
    return ""
def is_sweepvar(sub):
    """Determine whether a given substitution indicates a sweep.

    A sweep substitution is an indexable pair whose first element is the
    string "sweep" and whose second is an iterable or a callable.

    Returns:
        bool: True for a sweep substitution, else False.  (The original
        fell off the end and returned None in several non-sweep cases.)
    """
    try:
        if sub[0] == "sweep":
            if isinstance(sub[1], Iterable) or callable(sub[1]):
                return True
    except (TypeError, IndexError, KeyError):
        # not indexable / too short: clearly not a sweep
        pass
    return False
def invalid_types_for_oper(oper, a, b):
    "Raises TypeError for unsupported operations."
    raise TypeError("unsupported operand types"
                    " for %s: '%s' and '%s'"
                    % (oper, a.__class__.__name__, b.__class__.__name__))
def latex_num(c):
    "Format a number for LaTeX, expanding exponential notation."
    cstr = "%.4g" % c
    if 'e' not in cstr:
        return cstr
    mantissa, _, exponent = cstr.partition('e')
    return "%s \\times 10^{%i}" % (mantissa, int(exponent))
def flatten(ible, classes):
    """Flatten an iterable that contains other iterables

    Arguments
    ---------
    ible : Iterable
        Top-level container
    classes : type or tuple of types
        Classes kept as leaf elements

    Returns
    -------
    out : list
        List of all objects found in the nested iterables

    Raises
    ------
    TypeError
        If an object is found whose class was not in classes
    """
    flattened = []
    for element in ible:
        if isinstance(element, classes):
            flattened.append(element)
        elif isinstance(element, Iterable):
            flattened.extend(flatten(element, classes))
        else:
            raise TypeError("Iterable %s contains element '%s'"
                            " of invalid class %s."
                            % (ible, element, element.__class__))
    return flattened
| {
"repo_name": "galbramc/gpkit",
"path": "gpkit/small_scripts.py",
"copies": "1",
"size": "2512",
"license": "mit",
"hash": 6641886109513848000,
"line_mean": 25.4421052632,
"line_max": 79,
"alpha_frac": 0.548566879,
"autogenerated": false,
"ratio": 3.8705701078582435,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9911461548261753,
"avg_score": 0.0015350877192982454,
"num_lines": 95
} |
"""Assorted helper methods"""
from collections import Iterable
import numpy as np
def appendsolwarning(msg, data, result, category="uncategorized",
                     printwarning=False):
    """Append a particular category of warnings to a solution.

    Args:
        msg: human-readable warning message.
        data: arbitrary payload stored alongside the message.
        result: solution dict; a "warnings" entry is created on demand.
        category: key under which the warning is filed.
        printwarning: if True, also echo the message to stdout.
    """
    if printwarning:
        # parenthesized call form is valid in both Python 2 and 3
        # (the original used the Python-2-only print statement)
        print("Warning: %s\n" % msg)
    result.setdefault("warnings", {}).setdefault(category, []).append(
        (msg, data))
@np.vectorize
def isnan(element):
    "Determine if something of arbitrary type is a numpy nan."
    try:
        result = np.isnan(element)
    except TypeError:
        # non-numeric types can never be a numpy nan
        return False
    else:
        return result
def maybe_flatten(value):
    "Extract values from 0-d numpy arrays, if necessary"
    _missing = object()
    shape = getattr(value, "shape", _missing)
    if shape is not _missing and not shape:
        # () shape: a 0-d numpy array; unwrap its scalar
        return value.flatten()[0]
    return value
def try_str_without(item, excluded):
    "Try to call item.str_without(excluded); fall back to str(item)"
    str_without = getattr(item, "str_without", None)
    if str_without is None:
        return str(item)
    return str_without(excluded)
def mag(c):
    "Return magnitude of a Number or Quantity"
    if hasattr(c, "magnitude"):
        return c.magnitude
    return c
def nomial_latex_helper(c, pos_vars, neg_vars):
    """Combines (varlatex, exponent) tuples,
    separated by positive vs negative exponent,
    into a single latex string"""
    # TODO this is awkward due to sensitivity_map, which needs a refactor
    def varstr(varl, exp):
        "Render one variable, omitting exponents that print as 1."
        if "%.2g" % exp == "1":
            return varl
        return '%s^{%.2g}' % (varl, exp)

    pvarstr = ' '.join(sorted(varstr(varl, x) for (varl, x) in pos_vars))
    nvarstr = ' '.join(sorted(varstr(varl, -x) for (varl, x) in neg_vars))
    c = mag(c)
    cstr = "%.2g" % c
    if pos_vars and cstr in ("1", "-1"):
        # drop the "1" but keep a leading minus sign, if any
        cstr = cstr[:-1]
    else:
        cstr = latex_num(c)

    if not pos_vars and not neg_vars:
        return "%s" % cstr
    if pos_vars and not neg_vars:
        return "%s%s" % (cstr, pvarstr)
    if neg_vars and not pos_vars:
        return "\\frac{%s}{%s}" % (cstr, nvarstr)
    return "%s\\frac{%s}{%s}" % (cstr, pvarstr, nvarstr)
class SweepValue(object):
    "Object to represent a swept substitution."

    def __init__(self, value):
        # the sequence (or callable) to sweep over
        self.value = value
def is_sweepvar(sub):
    "Determines if a given substitution indicates a sweep."
    issweep, _ = splitsweep(sub)
    return issweep
def get_sweepval(sub):
    "Returns a given substitution's indicated sweep, or None."
    _, sweepval = splitsweep(sub)
    return sweepval
def splitsweep(sub):
    """Splits a substitution into (is_sweepvar, sweepval)

    Returns (True, value) when ``sub`` is a SweepValue or a
    ("sweep", iterable-or-callable) pair, else (False, None).
    """
    if isinstance(sub, SweepValue):
        return True, sub.value
    try:
        sweep, value = sub
        # use == rather than "is": identity of equal string literals is a
        # CPython interning accident, not guaranteed language behavior
        if sweep == "sweep" and (isinstance(value, Iterable) or
                                 callable(value)):
            return True, value
    except (TypeError, ValueError):
        pass
    return False, None
def latex_num(c):
    "Returns latex string of numbers, potentially using exponential notation."
    cstr = "%.4g" % c
    if 'e' not in cstr:
        return cstr
    mantissa, _, exponent = cstr.partition('e')
    return "%s \\times 10^{%i}" % (mantissa, int(exponent))
| {
"repo_name": "convexopt/gpkit",
"path": "gpkit/small_scripts.py",
"copies": "1",
"size": "3383",
"license": "mit",
"hash": 9010864775684146000,
"line_mean": 28.4173913043,
"line_max": 78,
"alpha_frac": 0.6006503104,
"autogenerated": false,
"ratio": 3.420626895854398,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4521277206254398,
"avg_score": null,
"num_lines": null
} |
"""Assorted Tk-related subroutines used in Grail."""
from types import *
from Tkinter import *
def _clear_entry_widget(event):
try:
widget = event.widget
widget.delete(0, INSERT)
except: pass
def install_keybindings(root):
    """Bind Control-u in every Entry widget to clear its contents."""
    root.bind_class('Entry', '<Control-u>', _clear_entry_widget)
def make_toplevel(master, title=None, class_=None):
    """Create a Toplevel widget.

    This is a shortcut for a Toplevel() instantiation plus calls to
    set the title and icon name of the widget.
    """
    kwargs = {'class_': class_} if class_ else {}
    widget = Toplevel(master, **kwargs)
    if title:
        widget.title(title)
        widget.iconname(title)
    return widget
def set_transient(widget, master, relx=0.5, rely=0.3, expose=1):
    """Make an existing toplevel widget transient for a master.

    The widget must exist but should not yet have been placed; in
    other words, this should be called after creating all the
    subwidget but before letting the user interact.

    Args:
        widget: the toplevel widget to position.
        master: window the widget becomes transient for.
        relx, rely: relative (0..1) position within the master's area.
        expose: if true, deiconify the widget once positioned.
    """
    widget.withdraw() # Remain invisible while we figure out the geometry
    widget.transient(master)
    widget.update_idletasks() # Actualize geometry information
    if master.winfo_ismapped():
        # position relative to the master's current on-screen area
        m_width = master.winfo_width()
        m_height = master.winfo_height()
        m_x = master.winfo_rootx()
        m_y = master.winfo_rooty()
    else:
        # master not visible yet: fall back to the whole screen
        m_width = master.winfo_screenwidth()
        m_height = master.winfo_screenheight()
        m_x = m_y = 0
    w_width = widget.winfo_reqwidth()
    w_height = widget.winfo_reqheight()
    x = m_x + (m_width - w_width) * relx
    y = m_y + (m_height - w_height) * rely
    widget.geometry("+%d+%d" % (x, y))
    if expose:
        widget.deiconify() # Become visible at the desired location
    return widget
def make_scrollbars(parent, hbar, vbar, pack=1, class_=None, name=None,
                    takefocus=0):
    """Subroutine to create a frame with scrollbars.

    This is used by make_text_box and similar routines.

    Note: the caller is responsible for setting the x/y scroll command
    properties (e.g. by calling set_scroll_commands()).

    Return a tuple containing the hbar, the vbar, and the frame, where
    hbar and vbar are None if not requested.
    """
    # Build the containing frame, passing class_/name only when given.
    if class_:
        if name: frame = Frame(parent, class_=class_, name=name)
        else: frame = Frame(parent, class_=class_)
    else:
        if name: frame = Frame(parent, name=name)
        else: frame = Frame(parent)
    if pack:
        frame.pack(fill=BOTH, expand=1)
    corner = None
    if vbar:
        if not hbar:
            # Vertical bar only: it may occupy the full right edge.
            vbar = Scrollbar(frame, takefocus=takefocus)
            vbar.pack(fill=Y, side=RIGHT)
        else:
            # Both bars: reserve a square "corner" below the vertical bar
            # so it does not collide with the horizontal bar.
            vbarframe = Frame(frame, borderwidth=0)
            vbarframe.pack(fill=Y, side=RIGHT)
            vbar = Scrollbar(frame, name="vbar", takefocus=takefocus)
            vbar.pack(in_=vbarframe, expand=1, fill=Y, side=TOP)
            sbwidth = vbar.winfo_reqwidth()
            corner = Frame(vbarframe, width=sbwidth, height=sbwidth)
            corner.propagate(0)
            corner.pack(side=BOTTOM)
    else:
        vbar = None
    if hbar:
        hbar = Scrollbar(frame, orient=HORIZONTAL, name="hbar",
                         takefocus=takefocus)
        hbar.pack(fill=X, side=BOTTOM)
    else:
        hbar = None
    return hbar, vbar, frame
def set_scroll_commands(widget, hbar, vbar):
    """Link a scrollable widget to its scroll bars.

    The scroll bars may be empty.
    """
    for bar, scrollcommand, view in ((vbar, 'yscrollcommand', 'yview'),
                                     (hbar, 'xscrollcommand', 'xview')):
        if bar:
            widget[scrollcommand] = (bar, 'set')
            bar['command'] = (widget, view)
    widget.vbar = vbar
    widget.hbar = hbar
def make_text_box(parent, width=0, height=0, hbar=0, vbar=1,
                  fill=BOTH, expand=1, wrap=WORD, pack=1,
                  class_=None, name=None, takefocus=None):
    """Subroutine to create a text box.

    Create:
    - a both-ways filling and expanding frame, containing:
      - a text widget on the left, and
      - possibly a vertical scroll bar on the right.
      - possibly a horizontal scroll bar at the bottom.

    Return the text widget and the frame widget.
    """
    hbar, vbar, frame = make_scrollbars(parent, hbar, vbar, pack,
                                        class_=class_, name=name,
                                        takefocus=takefocus)
    widget = Text(frame, wrap=wrap, name="text")
    # 0 means "keep the Text widget's default size"
    if width: widget.config(width=width)
    if height: widget.config(height=height)
    widget.pack(expand=expand, fill=fill, side=LEFT)
    set_scroll_commands(widget, hbar, vbar)
    return widget, frame
def make_list_box(parent, width=0, height=0, hbar=0, vbar=1,
                  fill=BOTH, expand=1, pack=1, class_=None, name=None,
                  takefocus=None):
    """Subroutine to create a list box.

    Like make_text_box().
    """
    hbar, vbar, frame = make_scrollbars(parent, hbar, vbar, pack,
                                        class_=class_, name=name,
                                        takefocus=takefocus)
    widget = Listbox(frame, name="listbox")
    # 0 means "keep the Listbox widget's default size"
    if width: widget.config(width=width)
    if height: widget.config(height=height)
    widget.pack(expand=expand, fill=fill, side=LEFT)
    set_scroll_commands(widget, hbar, vbar)
    return widget, frame
def make_canvas(parent, width=0, height=0, hbar=1, vbar=1,
                fill=BOTH, expand=1, pack=1, class_=None, name=None,
                takefocus=None):
    """Subroutine to create a canvas.

    Like make_text_box().
    """
    hbar, vbar, frame = make_scrollbars(parent, hbar, vbar, pack,
                                        class_=class_, name=name,
                                        takefocus=takefocus)
    # scroll region spans the requested width/height from the origin
    widget = Canvas(frame, scrollregion=(0, 0, width, height), name="canvas")
    if width: widget.config(width=width)
    if height: widget.config(height=height)
    widget.pack(expand=expand, fill=fill, side=LEFT)
    set_scroll_commands(widget, hbar, vbar)
    return widget, frame
def make_form_entry(parent, label, borderwidth=None):
    """Subroutine to create a form entry.

    Create:
    - a horizontally filling and expanding frame, containing:
      - a label on the left, and
      - a text entry on the right.

    Return the entry widget and the frame widget.
    """
    frame = Frame(parent)
    frame.pack(fill=X)
    label = Label(frame, text=label)
    label.pack(side=LEFT)
    # pass borderwidth only when explicitly given, keeping Tk's default
    if borderwidth is None:
        entry = Entry(frame, relief=SUNKEN)
    else:
        entry = Entry(frame, relief=SUNKEN, borderwidth=borderwidth)
    entry.pack(side=LEFT, fill=X, expand=1)
    return entry, frame
# This is a slightly modified version of the function above. This
# version does the proper alignment of labels with their fields. It
# should probably eventually replace make_form_entry altogether.
#
# The one annoying bug is that the text entry field should be
# expandable while still aligning the colons. This doesn't work yet.
#
def make_labeled_form_entry(parent, label, entrywidth=20, entryheight=1,
                            labelwidth=0, borderwidth=None,
                            takefocus=None):
    """Subroutine to create a form entry.

    Create:
    - a horizontally filling and expanding frame, containing:
      - a label on the left, and
      - a text entry on the right.

    Return the entry widget, the frame widget, and the label widget.
    """
    # normalize the label to end with a colon so columns line up
    if label and label[-1] != ':': label = label + ':'
    frame = Frame(parent)
    label = Label(frame, text=label, width=labelwidth, anchor=E)
    label.pack(side=LEFT)
    if entryheight == 1:
        # single-line field: a plain Entry widget
        if borderwidth is None:
            entry = Entry(frame, relief=SUNKEN, width=entrywidth)
        else:
            entry = Entry(frame, relief=SUNKEN, width=entrywidth,
                          borderwidth=borderwidth)
        entry.pack(side=RIGHT, expand=1, fill=X)
        frame.pack(fill=X)
    else:
        # multi-line field: delegate to make_text_box (with both bars)
        entry = make_text_box(frame, entrywidth, entryheight, 1, 1,
                              takefocus=takefocus)
        frame.pack(fill=BOTH, expand=1)
    return entry, frame, label
def make_double_frame(master=None, class_=None, name=None, relief=RAISED,
                      borderwidth=1):
    """Create a pair of frames suitable for 'hosting' a dialog.

    Returns (frame, top, bottom): the outer frame, an inner padded frame
    for the dialog body, and a bottom strip for buttons.
    """
    # outer frame; pass class_/name only when given
    if name:
        if class_: frame = Frame(master, class_=class_, name=name)
        else: frame = Frame(master, name=name)
    else:
        if class_: frame = Frame(master, class_=class_)
        else: frame = Frame(master)
    top = Frame(frame, name="topframe", relief=relief,
                borderwidth=borderwidth)
    bottom = Frame(frame, name="bottomframe")
    bottom.pack(fill=X, padx='1m', pady='1m', side=BOTTOM)
    top.pack(expand=1, fill=BOTH, padx='1m', pady='1m')
    frame.pack(expand=1, fill=BOTH)
    # an extra nested frame provides inner padding for the dialog body
    top = Frame(top)
    top.pack(expand=1, fill=BOTH, padx='2m', pady='2m')
    return frame, top, bottom
def make_group_frame(master, name=None, label=None, fill=Y,
                     side=None, expand=None, font=None):
    """Create nested frames with a border and optional label.

    The outer frame is only used to provide the decorative border, to
    control packing, and to host the label. The inner frame is packed
    to fill the outer frame and should be used as the parent of all
    sub-widgets. Only the inner frame is returned.
    """
    font = font or "-*-helvetica-medium-r-normal-*-*-100-*-*-*-*-*-*"
    outer = Frame(master, borderwidth=2, relief=GROOVE)
    outer.pack(expand=expand, fill=fill, side=side)
    if label:
        Label(outer, text=label, font=font, anchor=W).pack(fill=X)
    inner = Frame(master, borderwidth='1m', name=name)
    inner.pack(expand=1, fill=BOTH, in_=outer)
    # let callers hide the whole group through the inner frame handle
    inner.forget = outer.forget
    return inner
def unify_button_widths(*buttons):
    """Make buttons passed in all have the same width.

    Works for labels and other widgets with the 'text' option.
    """
    widths = [len(btn["text"]) for btn in buttons]
    widest = max(widths) if widths else 0
    for btn in buttons:
        btn["width"] = widest
def flatten(msg):
    """Turn a list or tuple into a single string -- recursively."""
    t = type(msg)
    if t in (ListType, TupleType):
        # join the flattened elements with single spaces
        msg = ' '.join(map(flatten, msg))
    elif t is ClassType:
        # old-style classes (Python 2 only) render as their name
        msg = msg.__name__
    else:
        msg = str(msg)
    return msg
def boolean(s):
    """Test whether a string is a Tk boolean, without error checking."""
    falsy = ('', '0', 'no', 'off', 'false')
    return 0 if s.lower() in falsy else 1
def test():
    """Test make_text_box(), make_form_entry(), flatten(), boolean()."""
    import sys
    root = Tk()
    entry, eframe = make_form_entry(root, 'Boolean:')
    text, tframe = make_text_box(root)
    def enter(event, entry=entry, text=text):
        # echo the boolean interpretation of the entry's contents
        s = boolean(entry.get()) and '\nyes' or '\nno'
        text.insert('end', s)
    entry.bind('<Return>', enter)
    entry.insert(END, flatten(sys.argv))
    root.mainloop()
if __name__ == '__main__':
    # run the interactive Tk demo when executed as a script
    test()
| {
"repo_name": "Jeff-Tian/mybnb",
"path": "Python27/Tools/webchecker/tktools.py",
"copies": "12",
"size": "11552",
"license": "apache-2.0",
"hash": 4962211438567245000,
"line_mean": 29.5628415301,
"line_max": 77,
"alpha_frac": 0.5901142659,
"autogenerated": false,
"ratio": 3.7324717285945073,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""Assorted tools for all aspects of funnelin' that don't fit elsewhere
"""
import logging
import re
import sys
from datetime import date, datetime, timedelta
from typing import Optional
from dateutil.relativedelta import relativedelta
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
from webdriver_manager.firefox import GeckoDriverManager
from webdriver_manager.microsoft import (EdgeChromiumDriverManager,
IEDriverManager)
from webdriver_manager.opera import OperaDriverManager
from jobfunnel.backend import Job
# Initialize list and store regex objects of date quantifiers
# NOTE: the unit alternations are wrapped in (?:...) so the digit capture
# applies to every alternative; the original r'...month|mois' matched a
# bare "mois"/"annee" with an EMPTY capture group, which crashes int('')
# in calc_post_date_from_relative_str.
HOUR_REGEX = re.compile(r'(\d+)(?:[ +]{1,3})?(?:hour|hr|heure)')
DAY_REGEX = re.compile(r'(\d+)(?:[ +]{1,3})?(?:day|d|jour)')
MONTH_REGEX = re.compile(r'(\d+)(?:[ +]{1,3})?(?:month|mois)')
YEAR_REGEX = re.compile(r'(\d+)(?:[ +]{1,3})?(?:year|annee)')
RECENT_REGEX_A = re.compile(r'[tT]oday|[jJ]ust [pP]osted')
RECENT_REGEX_B = re.compile(r'[yY]esterday')
def get_logger(logger_name: str, level: int, file_path: str,
               message_format: str) -> logging.Logger:
    """Initialize and return a logger

    NOTE: you can use this as a method to add logging to any function, but if
    you want to use this within a class, just inherit Logger class.

    Args:
        logger_name: unique name for the logger.
        level: logging level (e.g. logging.INFO).
        file_path: file to log to; skipped when falsy.  (The original
            unconditionally built a FileHandler, which crashes when the
            Logger class passes its file_path=None default through.)
        message_format: logging.Formatter format string.

    TODO: make more easily configurable w/ defaults
    TODO: streamline
    """
    logger = logging.getLogger(logger_name)
    logger.setLevel(level)
    formatter = logging.Formatter(message_format)
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(formatter)
    logger.addHandler(stdout_handler)
    if file_path:
        file_handler = logging.FileHandler(file_path)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger
class Logger:
    """Class that adds a self.logger attribute for stdio and fileio"""

    def __init__(self, level: int, file_path: Optional[str] = None,
                 logger_name: Optional[str] = None,
                 message_format: Optional[str] = None) -> None:
        """Add a logger to any class

        Args:
            level (int): logging level, which ought to be an Enum but isn't
            file_path (Optional[str], optional): file path to log messages to.
                NOTE: this logs at the specified log level.
            logger_name (Optional[str], optional): base name for the logger,
                should be unique. Defaults to inherited class name.
            message_format (Optional[str], optional): the formatting of the
                message to log. Defaults to a complete message with all info.
        """
        if not logger_name:
            logger_name = self.__class__.__name__
        if not message_format:
            message_format = ("[%(asctime)s] [%(levelname)s] "
                              + logger_name + ": %(message)s")
        self.logger = get_logger(
            logger_name=logger_name,
            level=level,
            file_path=file_path,
            message_format=message_format,
        )
def calc_post_date_from_relative_str(date_str: str) -> date:
    """Identifies a job's post date via its relative age string.

    Parses strings such as "7 hours", "3 days", "2 mois", "today",
    "just posted" or "yesterday".  We round to the nearest day only so
    that comparisons don't capture portions of days.

    Raises:
        ValueError: if no age information can be extracted from date_str.
    """
    post_date = datetime.now()  # type: date
    # Supports almost all formats like 7 hours|days and 7 hr|d|+d
    hours = HOUR_REGEX.findall(date_str)
    days = DAY_REGEX.findall(date_str)
    months = MONTH_REGEX.findall(date_str)
    years = YEAR_REGEX.findall(date_str)
    if hours:
        post_date -= timedelta(hours=int(hours[0]))
    elif days:
        post_date -= timedelta(days=int(days[0]))
    elif months:
        post_date -= relativedelta(months=int(months[0]))
    elif years:
        post_date -= relativedelta(years=int(years[0]))
    elif RECENT_REGEX_A.findall(date_str):
        pass  # "today" / "just posted": keep the current date
    elif RECENT_REGEX_B.findall(date_str):
        post_date -= timedelta(days=1)  # "yesterday"
    else:
        # We have failed to correctly evaluate date.
        # NOTE: the original's final guard was dead code ("not post_date"
        # was always False), so unparseable strings were silently labeled
        # "today"; failing loudly restores the evident intent.
        raise ValueError(
            f"Unable to calculate date from:\n{date_str}"
        )
    return post_date.replace(hour=0, minute=0, second=0, microsecond=0)
def get_webdriver():
    """Get whatever webdriver is available in the system.

    webdriver_manager and selenium are currently being used for this.
    Supported: Firefox, Chrome, Opera, Microsoft Edge, Internet Explorer

    Returns:
        webdriver that can be used for scraping.

    Raises:
        RuntimeError: when no supported browser/driver can be started.
    """
    # Try each supported browser in order of preference, falling through
    # to the next whenever driver installation or startup fails.
    try:
        driver = webdriver.Firefox(
            executable_path=GeckoDriverManager().install()
        )
    except Exception:
        try:
            driver = webdriver.Chrome(ChromeDriverManager().install())
        except Exception:
            try:
                driver = webdriver.Ie(IEDriverManager().install())
            except Exception:
                try:
                    driver = webdriver.Opera(
                        executable_path=OperaDriverManager().install()
                    )
                except Exception:
                    try:
                        driver = webdriver.Edge(
                            EdgeChromiumDriverManager().install()
                        )
                    except Exception:
                        raise RuntimeError(
                            "Your browser is not supported. Must have one of "
                            "the following installed to scrape: [Firefox, "
                            "Chrome, Opera, Microsoft Edge, Internet Explorer]"
                        )
    return driver
| {
"repo_name": "PaulMcInnis/JobPy",
"path": "jobfunnel/backend/tools/tools.py",
"copies": "1",
"size": "6263",
"license": "mit",
"hash": -9216326928547389000,
"line_mean": 39.6688311688,
"line_max": 79,
"alpha_frac": 0.5824684656,
"autogenerated": false,
"ratio": 4.364459930313589,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5446928395913588,
"avg_score": null,
"num_lines": null
} |
"""Assorted utilities shared between parts of apitools."""
import collections
import os
import random
from protorpc import messages
import six
from six.moves import http_client
import six.moves.urllib.error as urllib_error
import six.moves.urllib.parse as urllib_parse
import six.moves.urllib.request as urllib_request
from googlecloudapis.apitools.base.py import encoding
from googlecloudapis.apitools.base.py import exceptions
# Public API of this module.
__all__ = [
    'DetectGae',
    'DetectGce',
]
# Characters with reserved URI semantics (RFC 6570 "reserved expansion");
# passed as the safe set to urllib quote in ExpandRelativePath.
_RESERVED_URI_CHARS = r":/?#[]@!$&'()*+,;="
def DetectGae():
    """Determine whether or not we're running on GAE.

    This is based on:
    https://developers.google.com/appengine/docs/python/#The_Environment

    Returns:
      True iff we're running on GAE.
    """
    server_software = os.environ.get('SERVER_SOFTWARE', '')
    # startswith accepts a tuple of prefixes
    return server_software.startswith(('Development/', 'Google App Engine/'))
def DetectGce():
    """Determine whether or not we're running on GCE.

    This is based on:
    https://cloud.google.com/compute/docs/metadata#runninggce

    Returns:
      True iff we're running on a GCE instance.
    """
    opener = urllib_request.build_opener(urllib_request.ProxyHandler({}))
    try:
        response = opener.open(
            urllib_request.Request('http://metadata.google.internal'))
    except urllib_error.URLError:
        return False
    if response.getcode() != http_client.OK:
        return False
    return response.headers.get('metadata-flavor') == 'Google'
def NormalizeScopes(scope_spec):
    """Normalize scope_spec to a set of strings."""
    if isinstance(scope_spec, six.string_types):
        # a single space-separated string of scopes
        return set(scope_spec.split(' '))
    if not isinstance(scope_spec, collections.Iterable):
        raise exceptions.TypecheckError(
            'NormalizeScopes expected string or iterable, found %s' % (
                type(scope_spec),))
    return set(scope_spec)
def Typecheck(arg, arg_type, msg=None):
    """Return arg if it is an instance of arg_type.

    Raises:
      exceptions.TypecheckError: when the check fails; ``msg`` overrides
          the default message.
    """
    if isinstance(arg, arg_type):
        return arg
    if msg is None:
        if isinstance(arg_type, tuple):
            msg = 'Type of arg is "%s", not one of %r' % (type(arg), arg_type)
        else:
            msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type)
    raise exceptions.TypecheckError(msg)
def ExpandRelativePath(method_config, params, relative_path=None):
    """Determine the relative path for request.

    Substitutes each {param} (or {+param}) template in the path with the
    URL-quoted value from ``params``.

    Args:
      method_config: configuration providing relative_path and path_params.
      params: dict mapping path parameter names to values.
      relative_path: optional override for method_config.relative_path.

    Returns:
      The expanded relative path string.

    Raises:
      exceptions.InvalidUserInputError: when a required parameter is
          missing from the template or from params, or cannot be encoded.
    """
    path = relative_path or method_config.relative_path or ''
    for param in method_config.path_params:
        param_template = '{%s}' % param
        # For more details about "reserved word expansion", see:
        #   http://tools.ietf.org/html/rfc6570#section-3.2.2
        reserved_chars = ''
        reserved_template = '{+%s}' % param
        if reserved_template in path:
            # reserved expansion: URI-reserved characters pass through
            # unquoted
            reserved_chars = _RESERVED_URI_CHARS
            path = path.replace(reserved_template, param_template)
        if param_template not in path:
            raise exceptions.InvalidUserInputError(
                'Missing path parameter %s' % param)
        try:
            # TODO(craigcitro): Do we want to support some sophisticated
            # mapping here?
            value = params[param]
        except KeyError:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        if value is None:
            raise exceptions.InvalidUserInputError(
                'Request missing required parameter %s' % param)
        try:
            if not isinstance(value, six.string_types):
                value = str(value)
            path = path.replace(param_template,
                                urllib_parse.quote(value.encode('utf_8'),
                                                   reserved_chars))
        except TypeError as e:
            raise exceptions.InvalidUserInputError(
                'Error setting required parameter %s to value %s: %s' % (
                    param, value, e))
    return path
def CalculateWaitForRetry(retry_attempt, max_wait=60):
    """Calculates amount of time to wait before a retry attempt.

    Wait time grows exponentially with the number of attempts.
    A random amount of jitter is added to spread out retry attempts from
    different clients.

    Args:
      retry_attempt: Retry attempt counter.
      max_wait: Upper bound for wait time.

    Returns:
      Amount of time to wait before retrying request.
    """
    wait_time = 2 ** retry_attempt
    # randrange requires a nonzero interval, so we want to drop it if
    # the range is too small for jitter.
    if retry_attempt:
        # floor division: under Python 3 a true division would produce a
        # float, and random.randrange rejects non-integer bounds
        max_jitter = (2 ** retry_attempt) // 2
        wait_time += random.randrange(-max_jitter, max_jitter)
    return min(wait_time, max_wait)
def AcceptableMimeType(accept_patterns, mime_type):
    """Return True iff mime_type is acceptable for one of accept_patterns.

    Note that this function assumes that all patterns in accept_patterns
    will be simple types of the form "type/subtype", where one or both
    of these can be "*". We do not support parameters (i.e. "; q=") in
    patterns.

    Args:
      accept_patterns: list of acceptable MIME types.
      mime_type: the mime type we would like to match.

    Returns:
      Whether or not mime_type matches (at least) one of these patterns.
    """
    unsupported_patterns = [p for p in accept_patterns if ';' in p]
    if unsupported_patterns:
        raise exceptions.GeneratedClientError(
            'MIME patterns with parameter unsupported: "%s"' % ', '.join(
                unsupported_patterns))

    def _Matches(pattern):
        """Return True iff mime_type is acceptable for pattern."""
        if pattern == '*':  # Some systems use a single '*' instead of '*/*'.
            pattern = '*/*'
        return all(accept in ('*', provided)
                   for accept, provided in zip(pattern.split('/'),
                                               mime_type.split('/')))

    return any(_Matches(pattern) for pattern in accept_patterns)
def MapParamNames(params, request_type):
    """Reverse parameter remappings for URL construction."""
    remapped = []
    for param in params:
        custom = encoding.GetCustomJsonFieldMapping(request_type,
                                                    json_name=param)
        remapped.append(custom or param)
    return remapped
def MapRequestParams(params, request_type):
    """Perform any renames/remappings needed for URL construction.

    Currently, we have several ways to customize JSON encoding, in
    particular of field names and enums. This works fine for JSON
    bodies, but also needs to be applied for path and query parameters
    in the URL.

    This function takes a dictionary from param names to values, and
    performs any registered mappings. We also need the request type (to
    look up the mappings).

    Args:
      params: (dict) Map from param names to values
      request_type: (protorpc.messages.Message) request type for this API call

    Returns:
      A new dict of the same size, with all registered mappings applied.
    """
    mapped = dict(params)
    for name, value in params.items():
        remapped_name = encoding.GetCustomJsonFieldMapping(
            request_type, python_name=name)
        if remapped_name is not None:
            mapped[remapped_name] = mapped.pop(name)
        if isinstance(value, messages.Enum):
            # NOTE: enum values are stored under the original name, even
            # when the field name was remapped (preserved behavior)
            mapped[name] = encoding.GetCustomJsonEnumMapping(
                type(value), python_name=str(value)) or str(value)
    return mapped
| {
"repo_name": "wemanuel/smry",
"path": "smry/server-auth/ls/google-cloud-sdk/.install/.backup/lib/googlecloudapis/apitools/base/py/util.py",
"copies": "4",
"size": "7010",
"license": "apache-2.0",
"hash": 3095757246124527000,
"line_mean": 32.8647342995,
"line_max": 80,
"alpha_frac": 0.6851640514,
"autogenerated": false,
"ratio": 3.8987764182424915,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.007335378538020994,
"num_lines": 207
} |
"""Assorted utilities shared between parts of apitools."""
import random
def calculate_wait_for_retry(retry_attempt, max_wait=60):
    """Calculate the amount of time to wait before a retry attempt.

    Wait time grows exponentially with the number of attempts. A
    random amount of jitter is added to spread out retry attempts from
    different clients.

    :type retry_attempt: integer
    :param retry_attempt: Retry attempt counter.

    :type max_wait: integer
    :param max_wait: Upper bound for wait time [seconds].

    :rtype: integer
    :returns: Number of seconds to wait before retrying request.
    """
    base_wait = 2 ** retry_attempt
    jitter_bound = base_wait / 4.0
    jittered = base_wait + random.uniform(-jitter_bound, jitter_bound)
    # clamp into [1, max_wait]
    return max(1, min(jittered, max_wait))
def acceptable_mime_type(accept_patterns, mime_type):
    """Check that ``mime_type`` matches one of ``accept_patterns``.

    Note that this function assumes that all patterns in accept_patterns
    will be simple types of the form "type/subtype", where one or both
    of these can be "*". We do not support parameters (i.e. "; q=") in
    patterns.

    :type accept_patterns: list of string
    :param accept_patterns: acceptable MIME types.

    :type mime_type: string
    :param mime_type: the MIME being checked

    :rtype: boolean
    :returns: True if the supplied MIME type matches at least one of the
              patterns, else False.
    """
    if '/' not in mime_type:
        raise ValueError(
            'Invalid MIME type: "%s"' % mime_type)
    bad_patterns = [p for p in accept_patterns if ';' in p]
    if bad_patterns:
        raise ValueError(
            'MIME patterns with parameter unsupported: "%s"' % ', '.join(
                bad_patterns))
    provided = mime_type.split('/')

    def _pattern_ok(pattern):
        """Return True iff mime_type is acceptable for pattern."""
        return all(accept in ('*', part)
                   for accept, part in zip(pattern.split('/'), provided))

    return any(_pattern_ok(p) for p in accept_patterns)
| {
"repo_name": "huangkuan/hack",
"path": "lib/gcloud/streaming/util.py",
"copies": "7",
"size": "2112",
"license": "apache-2.0",
"hash": -3410826896478759400,
"line_mean": 33.6229508197,
"line_max": 73,
"alpha_frac": 0.6581439394,
"autogenerated": false,
"ratio": 4.108949416342412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 61
} |
""" Assorted utility functions and classes """
from os import path, listdir
def is_type(typecheck, data):
    """
    Generic type checker
    typically used to check that a string can be cast to int or float
    """
    try:
        typecheck(data)
        return True
    except ValueError:
        return False
'''
def walk(topdir, ignore=None):
"""os.walk with an option to ignore directories"""
for dirpath, dirnames, filenames in os.walk(topdir):
dirnames[:] = [
dirname for dirname in dirnames
if os.path.join(dirpath, dirname) not in ignore]
yield dirpath, dirnames, filenames
'''
def walk(top, topdown=True, onerror=None, followlinks=False, ignore=None):
    """Modified implementation of os.walk with ignore option

    Args:
        top: directory to walk.
        topdown: if True, yield each directory before its subdirectories.
        onerror: optional callable invoked with the OSError if listing fails.
        followlinks: if True, recurse into symlinked directories.
        ignore: collection of entry names to skip entirely (default: none).
            NOTE: was a mutable default ([]) before; None avoids state
            shared across calls.

    Yields:
        (dirpath, dirnames, filenames) tuples, like os.walk.
    """
    if ignore is None:
        ignore = ()
    islink, join, isdir = path.islink, path.join, path.isdir
    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains. os.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit. That logic is copied here.
    try:
        names = listdir(top)
    except OSError as err:
        if onerror is not None:
            onerror(err)
        return
    dirs, nondirs = [], []
    for name in names:
        if name not in ignore:
            if isdir(join(top, name)):
                dirs.append(name)
            else:
                nondirs.append(name)
    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        new_path = join(top, name)
        if followlinks or not islink(new_path):
            # pass ignore through: the original dropped it on recursion,
            # so ignored names were skipped only at the top level
            for step in walk(new_path, topdown, onerror, followlinks,
                             ignore):
                yield step
    if not topdown:
        yield top, dirs, nondirs
| {
"repo_name": "ben-albrecht/qcl",
"path": "qcl/utils.py",
"copies": "1",
"size": "1829",
"license": "mit",
"hash": 1111076110269307600,
"line_mean": 28.9836065574,
"line_max": 72,
"alpha_frac": 0.6129032258,
"autogenerated": false,
"ratio": 4.185354691075515,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 61
} |
"""Assorted utility methods for use in creating posters."""
# Copyright 2016-2018 Florian Pigorsch & Contributors. All rights reserved.
#
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
import colour
import math
from typing import List, Optional, Tuple
import s2sphere as s2
from .value_range import ValueRange
from .xy import XY
# mercator projection
def latlng2xy(latlng: s2.LatLng) -> XY:
    """Project a lat/lng pair to normalized mercator XY coordinates."""
    x = lng2x(latlng.lng().degrees)
    y = lat2y(latlng.lat().degrees)
    return XY(x, y)
def lng2x(lng_deg: float) -> float:
    """Map a longitude in degrees [-180, 180] onto [0, 2]."""
    scaled = lng_deg / 180
    return scaled + 1
def lat2y(lat_deg: float) -> float:
    """Map a latitude in degrees onto a mercator y coordinate (0.5 at the equator)."""
    mercator = math.log(math.tan(math.pi / 4 * (1 + lat_deg / 90)))
    return 0.5 - mercator / math.pi
def project(bbox: s2.LatLngRect, size: XY, offset: XY, latlnglines: List[List[s2.LatLng]]) \
        -> List[List[Tuple[float, float]]]:
    """Project polylines of lat/lng points into pixel coordinates.

    Scales the mercator projection of `bbox` to fit `size` (preserving
    aspect ratio), centers it within `size` at `offset`, and converts each
    polyline to a list of (x, y) tuples.  Points outside `bbox` split a
    polyline into separate segments.
    """
    min_x = lng2x(bbox.lng_lo().degrees)
    d_x = lng2x(bbox.lng_hi().degrees) - min_x
    # Normalize the longitude span into [0, 2) to handle antimeridian wrap.
    while d_x >= 2:
        d_x -= 2
    while d_x < 0:
        d_x += 2
    min_y = lat2y(bbox.lat_lo().degrees)
    max_y = lat2y(bbox.lat_hi().degrees)
    d_y = abs(max_y - min_y)
    # Uniform scale: fit whichever dimension is the limiting one.
    scale = size.x / d_x if size.x / size.y <= d_x / d_y else size.y / d_y
    # Shift so the scaled bounding box is centered within `size`.
    offset = offset + 0.5 * (size - scale * XY(d_x, -d_y)) - scale * XY(min_x, min_y)
    lines = []
    for latlngline in latlnglines:
        line = []
        for latlng in latlngline:
            if bbox.contains(latlng):
                line.append((offset + scale * latlng2xy(latlng)).tuple())
            else:
                # An out-of-bounds point ends the current segment.
                if len(line) > 0:
                    lines.append(line)
                    line = []
        if len(line) > 0:
            lines.append(line)
    return lines
def compute_bounds_xy(lines: List[List[XY]]) -> Tuple[ValueRange, ValueRange]:
    """Return the ValueRanges spanned by all x and all y coordinates in `lines`."""
    range_x = ValueRange()
    range_y = ValueRange()
    for point in (p for segment in lines for p in segment):
        range_x.extend(point.x)
        range_y.extend(point.y)
    return range_x, range_y
def compute_grid(count: int, dimensions: XY) -> Tuple[Optional[float], Optional[Tuple[int, int]]]:
    """Find the grid layout (with at least `count` cells) whose square cells
    waste the least area of `dimensions`.

    Returns (cell_size, (columns, rows)), or (None, None) if every layout
    would overfill the available area.
    """
    # this is somehow suboptimal O(count^2). I guess it's possible in O(count)
    best_size = None
    best_counts = None
    min_waste = -1
    for n_cols in range(1, count + 1):
        cell_w = dimensions.x / n_cols
        for n_rows in range(1, count + 1):
            if n_cols * n_rows < count:
                continue
            cell = min(cell_w, dimensions.y / n_rows)
            waste = dimensions.x * dimensions.y - count * cell * cell
            if waste < 0:
                continue
            if best_size is None or waste < min_waste:
                best_size = cell
                best_counts = n_cols, n_rows
                min_waste = waste
    return best_size, best_counts
def interpolate_color(color1: str, color2: str, ratio: float) -> str:
    """Linearly blend two colors in HSL space; `ratio` is clamped to [0, 1]."""
    ratio = min(1, max(0, ratio))
    inv = 1 - ratio
    start = colour.Color(color1)
    end = colour.Color(color2)
    blended = colour.Color(hue=inv * start.hue + ratio * end.hue,
                           saturation=inv * start.saturation + ratio * end.saturation,
                           luminance=inv * start.luminance + ratio * end.luminance)
    return blended.hex_l
| {
"repo_name": "laufhannes/GpxTrackPoster",
"path": "gpxtrackposter/utils.py",
"copies": "1",
"size": "3281",
"license": "mit",
"hash": 4575488768325399000,
"line_mean": 32.4795918367,
"line_max": 98,
"alpha_frac": 0.5681194758,
"autogenerated": false,
"ratio": 3.1548076923076924,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42229271681076924,
"avg_score": null,
"num_lines": null
} |
"""Assorted utility methods for use in creating posters."""
# Copyright 2016-2021 Florian Pigorsch & Contributors. All rights reserved.
#
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
import locale
from itertools import takewhile, count as itercount
import math
import typing
import colour # type: ignore
import s2sphere # type: ignore
from gpxtrackposter.value_range import ValueRange
from gpxtrackposter.xy import XY
# mercator projection
def latlng2xy(latlng: s2sphere.LatLng) -> XY:
    """Convert an s2sphere LatLng to mercator-projected XY."""
    lng_degrees = latlng.lng().degrees
    lat_degrees = latlng.lat().degrees
    return XY(lng2x(lng_degrees), lat2y(lat_degrees))
def lng2x(lng_deg: float) -> float:
    """Map a longitude in degrees [-180, 180] onto [0, 2]."""
    fraction = lng_deg / 180
    return fraction + 1
def lat2y(lat_deg: float) -> float:
    """Map a latitude in degrees onto a mercator y coordinate (0.5 at the equator)."""
    projected = math.log(math.tan(math.pi / 4 * (1 + lat_deg / 90)))
    return 0.5 - projected / math.pi
def project(
    bbox: s2sphere.LatLngRect, size: XY, offset: XY, latlnglines: typing.List[typing.List[s2sphere.LatLng]]
) -> typing.List[typing.List[typing.Tuple[float, float]]]:
    """Project polylines of lat/lng points into pixel coordinates.

    Scales the mercator projection of `bbox` to fit `size` (preserving
    aspect ratio), centers it within `size` at `offset`, and converts each
    polyline to a list of (x, y) tuples.  Points outside `bbox` split a
    polyline into separate segments.
    """
    min_x = lng2x(bbox.lng_lo().degrees)
    d_x = lng2x(bbox.lng_hi().degrees) - min_x
    # Normalize the longitude span into [0, 2) to handle antimeridian wrap.
    while d_x >= 2:
        d_x -= 2
    while d_x < 0:
        d_x += 2
    min_y = lat2y(bbox.lat_lo().degrees)
    max_y = lat2y(bbox.lat_hi().degrees)
    d_y = abs(max_y - min_y)
    # Uniform scale: fit whichever dimension is the limiting one.
    scale = size.x / d_x if size.x / size.y <= d_x / d_y else size.y / d_y
    # Shift so the scaled bounding box is centered within `size`.
    offset = offset + 0.5 * (size - scale * XY(d_x, -d_y)) - scale * XY(min_x, min_y)
    lines = []
    for latlngline in latlnglines:
        line = []
        for latlng in latlngline:
            if bbox.contains(latlng):
                line.append((offset + scale * latlng2xy(latlng)).tuple())
            else:
                # An out-of-bounds point ends the current segment.
                if len(line) > 0:
                    lines.append(line)
                    line = []
        if len(line) > 0:
            lines.append(line)
    return lines
def compute_bounds_xy(lines: typing.List[typing.List[XY]]) -> typing.Tuple[ValueRange, ValueRange]:
    """Return the ValueRanges covering all x and all y coordinates in `lines`."""
    xs = ValueRange()
    ys = ValueRange()
    for polyline in lines:
        for vertex in polyline:
            xs.extend(vertex.x)
            ys.extend(vertex.y)
    return xs, ys
def compute_grid(
    count: int, dimensions: XY
) -> typing.Tuple[typing.Optional[float], typing.Optional[typing.Tuple[int, int]]]:
    """Find the grid layout (with at least `count` cells) whose square cells
    waste the least area of `dimensions`.

    Returns (cell_size, (columns, rows)), or (None, None) if every layout
    would overfill the available area.
    """
    # this is somehow suboptimal O(count^2). I guess it's possible in O(count)
    best_size = None
    best_counts = None
    smallest_waste = -1.0
    for n_cols in range(1, count + 1):
        col_width = dimensions.x / n_cols
        for n_rows in range(1, count + 1):
            if n_cols * n_rows < count:
                continue
            cell = min(col_width, dimensions.y / n_rows)
            waste = dimensions.x * dimensions.y - count * cell * cell
            if waste < 0:
                continue
            if best_size is None or waste < smallest_waste:
                best_size = cell
                best_counts = n_cols, n_rows
                smallest_waste = waste
    return best_size, best_counts
def interpolate_color(color1: str, color2: str, ratio: float) -> str:
    """Blend `color1` toward `color2` by `ratio` (clamped to [0, 1]) in HSL space."""
    ratio = min(1, max(0, ratio))
    inv = 1 - ratio
    first = colour.Color(color1)
    second = colour.Color(color2)
    mixed = colour.Color(
        hue=inv * first.hue + ratio * second.hue,
        saturation=inv * first.saturation + ratio * second.saturation,
        luminance=inv * first.luminance + ratio * second.luminance,
    )
    return mixed.hex_l
def format_float(f: float) -> str:
    """Render `f` with one decimal place using the current locale."""
    formatted = locale.format_string("%.1f", f)
    return formatted
def make_key_times(year_count: int) -> typing.List[str]:
    """Return SVG `keyTimes` values evenly dividing [0, 1] into `year_count` steps.

    year_count: year run date count
    return: list of key time points as strings; always starts with "0" and
        ends with "1", as the svg keyTimes rule requires.
    """
    # Compute i / year_count exactly instead of accumulating a float step:
    # itertools.count(0, 1 / year_count) accumulates rounding error and,
    # e.g. for year_count=3, emitted a spurious extra point just below 1
    # (producing a duplicate "1.0", "1" tail).
    return ["0"] + [str(round(i / year_count, 2)) for i in range(1, year_count)] + ["1"]
| {
"repo_name": "flopp/GpxTrackPoster",
"path": "gpxtrackposter/utils.py",
"copies": "1",
"size": "3880",
"license": "mit",
"hash": -3825925578117574000,
"line_mean": 30.5447154472,
"line_max": 107,
"alpha_frac": 0.5889175258,
"autogenerated": false,
"ratio": 3.1493506493506493,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42382681751506496,
"avg_score": null,
"num_lines": null
} |
"""Assortment of helpers to print/manipulate data."""
import tempfile
import os
import csv
import shutil
from .config import get_config_section
# Cached configuration sections controlling the HTML template and the
# console table layout.
_html_options = get_config_section("HTMLOptions")
_console_options = get_config_section("ConsoleOptions")
def to_html(data):
    """Render namedtuple rows as an HTML table and open it in a browser.

    Builds a DataTables-style table from the rows' fields, injects it into
    the configured HTML template, writes a temp .htm file and opens it.
    No-op if `data` is empty.
    """
    if not data:
        return
    fields = data[0]._fields
    # One <tr> template with a {field} placeholder per column.
    row_template = ('<tr>' + ''.join("<td>{" + f + "}</td>"
                                     for f in fields) + '</tr>')
    header_footer_text = ''.join("<th>" + f + "</th>" for f in fields)
    mark_up = ["""<TABLE id="tbResultSet" class="cell-border" cellspacing="0" width="100%">"""]
    mark_up.append('<thead><TR>')
    mark_up.append(header_footer_text)
    mark_up.append('</TR></thead>')
    mark_up.append('<tfoot><TR>')
    mark_up.append(header_footer_text)
    mark_up.append('</TR></tfoot>')
    mark_up.append('<tbody>')
    for row in data:
        mark_up.append(row_template.format_map(row._asdict()))
    mark_up.append('</tbody>')
    mark_up.append("</TABLE>")
    htmlfile_handle, htmlpath = tempfile.mkstemp(".htm", text=True)
    tmp = _html_options["Template_HTML"]
    tmp = tmp.replace("{{TABLE MARK UP}}", "\n".join(mark_up))
    tmp = tmp.replace("{{SCRIPT DIR}}",
                      os.path.dirname(os.path.realpath(__file__)) +
                      "\\resources")
    print("Creating and opening temp file", htmlpath)
    with os.fdopen(htmlfile_handle, mode="w", encoding="UTF-8") as f:
        f.write(tmp)
    # NOTE(review): os.startfile is Windows-only.
    os.startfile(htmlpath)
def to_csv(data):
    """Write namedtuple rows to a temp CSV file and open it.

    No-op if `data` is empty.  Uses os.startfile, which is Windows-only.
    """
    if not data:
        return
    fields = data[0]._fields
    csvfile_handle, csvpath = tempfile.mkstemp(".csv", text=True)
    print("Creating and opening temp file", csvpath)
    with os.fdopen(csvfile_handle, mode="w", encoding="UTF-8",
                   newline='') as f:
        writer = csv.DictWriter(f, fieldnames=fields)
        writer.writeheader()
        for row in data:
            writer.writerow(row._asdict())
    os.startfile(csvpath)
def to_console(data):
    """Print namedtuple rows as a fixed-width, pipe-delimited console table.

    Cells are truncated per configuration; trailing columns that do not
    fit the current terminal width are hidden and a summary is printed.
    No-op if `data` is empty.
    """
    if not data:
        return
    headers = data[0]._fields
    formatted_headers = {h: _console_column_formatter(h) for h in headers}
    formatted_data = [
        {key: _console_column_formatter(val) for key, val in row._asdict().items()}
        for row in data
    ]
    col_lengths, visible_count = _console_cols_to_fit(headers, formatted_headers, formatted_data)
    templates = {h: "{:<" + str(col_lengths[h]) + "}" for h in headers}
    visible = headers[:visible_count]
    for h in visible:
        print(templates[h].format(formatted_headers[h]), end="|")
    print()
    for h in visible:
        print('-' * col_lengths[h], end="|")
    print()
    for row in formatted_data:
        for h in visible:
            print(templates[h].format(row[h]), end="|")
        print()
    if visible_count < len(headers):
        print("\n", visible_count, "out of", len(headers), " columns visible.")
def _console_column_formatter(value):
    """Stringify a cell value and truncate it to the configured maximum width."""
    text = str(value)
    limit = int(_console_options['Max_Col_Length'])
    if len(text) <= limit:
        return text
    # Leave room for the "..." ellipsis within the limit.
    return text[:limit - 3] + "..."
def _console_cols_to_fit(headers, formatted_headers, data):
col_lenghts = {}
for h in headers:
lenght = max([len(x[h]) for x in data])
col_lenghts[h] = lenght
# check if header is wider than data
for k, v in col_lenghts.items():
if len(formatted_headers[k]) > v:
col_lenghts[k] = len(formatted_headers[k])
total_chars, _ = shutil.get_terminal_size()
chars_count = 1 # accounts for final EOL char
for col_index, col_name in enumerate(headers):
chars_count += (col_lenghts[col_name] + 1) # each column takes one extra char
if chars_count > total_chars:
return col_lenghts, col_index
else:
return col_lenghts, len(headers)
| {
"repo_name": "sebasmonia/pyquebec",
"path": "pyquebec/formatters.py",
"copies": "1",
"size": "3940",
"license": "mit",
"hash": 1774951417471794400,
"line_mean": 33.8672566372,
"line_max": 98,
"alpha_frac": 0.6027918782,
"autogenerated": false,
"ratio": 3.3994823123382227,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.949035826224907,
"avg_score": 0.002383185657830716,
"num_lines": 113
} |
"""Assortment of utilities for application."""
import itertools
import operator
import os
import random
from typing import List
from flask_sqlalchemy import Model, SQLAlchemy
from pma_api.app import PmaApiFlask
# URL-safe base64-style alphabet used by next64() to build random IDs.
B64_CHAR_SET = ''.join(('abcdefghijklmnopqrstuvwxyz',
                        'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
                        '0123456789-_'))
# IDs handed out so far; seeded with None so next64()'s first draw always loops.
seen = {None}
random.seed(2020)  # fixed seed: deterministic ID sequence across runs
def next64():
    """Random string generator.

    Returns:
        str: Randomly generated 8-character string, unique across calls
        within this process (tracked via the module-level `seen` set).
    """
    length = 8
    candidate = None
    # Re-draw until the candidate has not been handed out before.
    while candidate in seen:
        candidate = ''.join(random.choice(B64_CHAR_SET) for _ in range(length))
    seen.add(candidate)
    return candidate
def most_common(a_list: list):
    """Get most common element in a list.

    Ties are broken in favor of the element that appears earliest.

    Args:
        a_list (list): Any arbitrary list (elements must be sortable)

    Returns:
        any: the highest-count / earliest item
    """
    # Sort (value, position) pairs so equal values are adjacent for groupby.
    indexed = sorted((value, position) for position, value in enumerate(a_list))
    groups = itertools.groupby(indexed, key=operator.itemgetter(0))

    def _rank(group):
        """Rank a (value, pairs) group by (count, -first_position)."""
        _, pairs = group
        occurrences = 0
        first_position = len(a_list)
        for _, position in pairs:
            occurrences += 1
            first_position = min(first_position, position)
        return occurrences, -first_position

    return max(groups, key=_rank)[0]
def dict_to_pretty_json(dictionary: dict) -> str:
    """Given a dictionary, pretty print JSON str.

    Args:
        dictionary (dict): dictionary

    Returns:
        str: Prettified JSON string (sorted keys, 4-space indent)
    """
    # Fix: the annotations were the value literals `{}` and `''`, which are
    # not types; use `dict` and `str` so type checkers can read them.
    import json
    return json.dumps(
        dictionary,
        sort_keys=True,
        indent=4,
        separators=(',', ': '))
def join_url_parts(*args: str) -> str:
    """Join parts of a url string.

    Parts of a URL string may come from different sources, so joining them
    directly together may yield too many or too few '/' delimiters.

    Args:
        *args: URL fragments to join

    Returns:
        str: Well-formed url with an http:// scheme
    """
    joined = '/'.join(args)
    # Strip any scheme first so the duplicate-slash collapse cannot eat it.
    without_scheme = joined.replace('http://', '')
    return 'http://' + without_scheme.replace('//', '/')
def get_db_models(db: SQLAlchemy) -> List[Model]:
    """Get list of models from SqlAlchemy

    Args:
        db: SqlAlchemy db object

    Returns:
        list(Model): List of registered SqlAlchemy models
    """
    # noinspection PyProtectedMember
    # NOTE(review): relies on the private _decl_class_registry attribute,
    # which was removed in SQLAlchemy 1.4 — confirm the pinned version.
    models: List[Model] = \
        [cls for cls in db.Model._decl_class_registry.values()
         if isinstance(cls, type) and issubclass(cls, db.Model)]
    return models
def stderr_stdout_captured(func):
    """Capture stderr and stdout produced while calling `func`.

    Args:
        func: A zero-argument function

    Returns:
        str, str, any: stderr output, stdout output, return of function
    """
    import sys
    from io import StringIO
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    captured_stderr = sys.stderr = StringIO()
    captured_stdout = sys.stdout = StringIO()
    try:
        returned_value = func()
        _err: str = captured_stderr.getvalue()
        _out: str = captured_stdout.getvalue()
    finally:
        # Bug fix (resolves the old TODO): always restore the real streams,
        # even if func() raises; previously an exception left
        # sys.stdout/sys.stderr permanently redirected.
        sys.stdout = old_stdout
        sys.stderr = old_stderr
    return _err, _out, returned_value
def get_app_instance() -> PmaApiFlask:
    """Get reference to copy of currently running application instance

    Falls back to creating a fresh app (configured by the ENV_NAME env var)
    when no Flask application context is active.

    Returns:
        PmaApiFlask: PmaApiFlask application instance.
    """
    err = 'A current running app was not able to be found.'
    try:
        from flask import current_app
        app: PmaApiFlask = current_app
        # An unbound proxy means no app context exists; trigger the fallback.
        if app.__repr__() == '<LocalProxy unbound>':
            raise RuntimeError(err)
    except RuntimeError:
        from pma_api import create_app
        app: PmaApiFlask = create_app(os.getenv('ENV_NAME', 'default'))
    return app
| {
"repo_name": "joeflack4/pma-api",
"path": "pma_api/utils.py",
"copies": "1",
"size": "3999",
"license": "mit",
"hash": 172155126034156160,
"line_mean": 23.0903614458,
"line_max": 76,
"alpha_frac": 0.6124031008,
"autogenerated": false,
"ratio": 4.015060240963855,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0,
"num_lines": 166
} |
# Assume 1mb MaskROM
# Dumps a SNES mask-ROM cartridge over Raspberry Pi GPIO into rom.sfc,
# clocking an external address counter and sampling the 8-bit data bus.
# NOTE(review): Python 2 script — result.write("%c" % b) writes a str to a
# binary-mode file, which raises TypeError on Python 3.
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BCM)
# Control pins (BCM numbering): address-counter clock/clear and the ROM's
# output-enable / chip-select lines (driven low to read — presumably
# active-low; confirm against the cartridge pinout).
counter_clock = 19
counter_clear = 26
mask_oe = 6
mask_cs = 13
# Data bus pins D0..D7 (same numbers repeated in `datapins` below).
snes_d0 = 18
snes_d1 = 23
snes_d2 = 24
snes_d3 = 25
snes_d4 = 12
snes_d5 = 16
snes_d6 = 20
snes_d7 = 21
outputs = [counter_clock, counter_clear, mask_oe, mask_cs]
datapins = [18,23,24,25,12,16,20,21]
for i in datapins:
    GPIO.setup(i, GPIO.IN, pull_up_down=GPIO.PUD_DOWN)
for i in outputs:
    GPIO.setup(i, GPIO.OUT)
def cycle(states, pins, delay):
    # Drive each state in turn onto all pins, pausing `delay` seconds per state.
    for s in states:
        for p in pins:
            GPIO.output(p, s)
        time.sleep(delay)
# Make sure the cartridge and counters are reset
GPIO.output(counter_clock, False)
cycle([True, False], [counter_clear], 1)
a = 0
result = open("rom.sfc", "wb")
while True:
    # Some sort of progress
    if (a&1023)==0:
        print(a)
    # Perform snes read
    # http://www.cs.umb.edu/~bazz/snes/cartridges/electronics/speeds.html
    GPIO.output(mask_oe, True)
    GPIO.output(mask_cs, True)
    GPIO.output(counter_clock, False)
    # Adress line already setup
    GPIO.output(mask_oe, False)
    GPIO.output(mask_cs, False)
    # Wait for data to appear
    # Python is so slow so no need to wait!
    # Assemble one byte from the eight data pins (D0 = least significant bit).
    b = 0
    for d in range(8):
        s = GPIO.input(datapins[d])
        b = b + (s<<d)
    result.write("%c" % (b))
    # Last supported byte?
    # NOTE(review): the break fires after writing address 1<<20, so the dump
    # is 1 MiB + 1 byte — confirm whether that final byte is intended.
    if a==(1<<20):
        break
    GPIO.output(mask_oe, True)
    GPIO.output(mask_cs, True)
    GPIO.output(counter_clock, True) # Advance to next adr
    a = a + 1
    # Sleep to make counter advance. Python slow!
"repo_name": "breakin/breakin.github.io",
"path": "journal/code/maskrom32extract-v1.py",
"copies": "1",
"size": "1465",
"license": "mit",
"hash": -5902981449660577000,
"line_mean": 17.7948717949,
"line_max": 70,
"alpha_frac": 0.6744027304,
"autogenerated": false,
"ratio": 2.45393634840871,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.362833907880871,
"avg_score": null,
"num_lines": null
} |
# assume admin login has already been handled
import model
import webapp2, jinja2, os, cgi
from datetime import *
from dateutil.parser import *
from google.appengine.api import users
# Shared Jinja2 environment: auto-escaped templates loaded from view/templating.
jinja_environment = jinja2.Environment(autoescape=True,
        loader=jinja2.FileSystemLoader(os.path.join(os.path.dirname(__file__), 'view/templating')))
class AdminHandler(webapp2.RequestHandler):
    """Renders the admin landing page."""
    def get(self):
        # admin flag lets the navbar show admin-only links
        template_values = {
            'isadmin': users.is_current_user_admin(),
        }
        page = jinja_environment.get_template('admin.html')
        self.response.out.write(page.render(template_values))
class DateRedirector(webapp2.RequestHandler):
    """Redirects bare /date requests to the admin page."""
    def get(self):
        self.redirect("admin")
class EditHandler(webapp2.RequestHandler):
    """Displays and saves a special (non-default) schedule for one date."""
    def get(self):
        """Render the schedule-editing form for the date in the query string."""
        # load the page with a paramater, convert it to a datetime object
        date = self.request.get('date')
        edit_date_datetime = parse(date)
        # convert the datetime object to a date object
        edit_date_date = edit_date_datetime.date()
        # determine how many blocks are needed (default 12)
        blocks = self.request.get('blocks')
        if blocks == "":
            blocks = 12
        else:
            blocks = int(blocks)
        # figure out if there is a special schedule already to display warning
        is_special = model.isSpecialSchedule(edit_date_date)
        # admin check for navbar
        isadmin = users.is_current_user_admin()
        # load the template
        template_values = {
            'edit_date': model.formatDate(edit_date_date),
            'blocks': blocks,
            'isadmin': isadmin,
            'is_special': is_special
        }
        template = jinja_environment.get_template('edit.html')
        self.response.out.write(template.render(template_values))
    def post(self):
        """Persist the submitted schedule blocks for the date, replacing any existing schedule."""
        date = self.request.get('date')
        edit_date_datetime = parse(date)
        # convert the datetime object to a date object
        edit_date_date = edit_date_datetime.date()
        # delete any existing schedules to prevent duplicates
        model.deleteSchedule(edit_date_date)
        maxblocks = self.request.get('blocks')
        if maxblocks == "":
            maxblocks = 12
        else:
            maxblocks = int(maxblocks)
        iteratingblock = 0
        while True:
            # pull block name and optional tooltip from form
            name = self.request.get("name" + str(iteratingblock))
            if name != "":
                tooltip = self.request.get("tooltip" + str(iteratingblock))
                start = self.request.get("start" + str(iteratingblock))
                end = self.request.get("end" + str(iteratingblock))
                # parse start and end time inputs; `default` anchors them to the edit date
                sTime_dt = parse(start, default = edit_date_datetime)
                eTime_dt = parse(end, default = edit_date_datetime)
                # convert datetime objects to time objects
                sTime = sTime_dt.time()
                eTime = eTime_dt.time()
                # run through backend code
                model.createBlock(name, edit_date_date, sTime, eTime, tooltip)
            iteratingblock += 1
            if iteratingblock == maxblocks:
                break
        # redirect to the schedule for the date just edited
        self.redirect('/schedule?date=' + date)
class SplitLunchHandler(webapp2.RequestHandler):
    """Toggles the split-lunch flag for the date given in `lunchurl`."""
    def get(self):
        date = self.request.get('lunchurl')
        model.changeSplitLunch(date)
        self.redirect('/')
class FeebackWriteHandler(webapp2.RequestHandler):
    """Updates the feedback link URL.

    NOTE(review): class name keeps the historical 'Feeback' typo; renaming
    it would break the route table at the bottom of this file.
    """
    def get(self):
        url = self.request.get('feedbackurl')
        model.changeFeedback(url)
        self.redirect('/')
class RevertDateHandler(webapp2.RequestHandler):
    """Deletes any special schedule for a date, reverting it to the default."""
    def get(self):
        date = self.request.get('dateR')
        edit_date_datetime = parse(date)
        # Consistency fix: model.deleteSchedule is called with a date object
        # elsewhere in this file (EditHandler.post); pass a date here too
        # instead of a full datetime.
        model.deleteSchedule(edit_date_datetime.date())
        self.redirect('/')
# URL routing table mapping request paths to their handler classes.
app = webapp2.WSGIApplication([
    ('/date', DateRedirector),
    ('/edit', EditHandler),
    ('/revertdate', RevertDateHandler),
    ('/admin', AdminHandler),
    ('/changelunch', SplitLunchHandler),
    ('/changefeedback', FeebackWriteHandler)
], debug=True)
"repo_name": "shickey/BearStatus",
"path": "edit.py",
"copies": "1",
"size": "4615",
"license": "mit",
"hash": -262316631831691680,
"line_mean": 29.3684210526,
"line_max": 95,
"alpha_frac": 0.5677139762,
"autogenerated": false,
"ratio": 4.484936831875608,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5552650808075608,
"avg_score": null,
"num_lines": null
} |
"""Assume atmospheric profile which comes from Sarazin and Tokovinin (2002):
- 0.86" seeing
- tau_0=3.9ms, which is 9.4m/s wind. But lets double this to cover more actual seeing
conditions.
We will just move through the wavefront with nd.interpolation.shift, with a wrap.
"""
import astropy.constants as const
import astropy.units as units
import matplotlib.pyplot as plt
import numpy as np
import scipy.ndimage as nd
import pdb
import os
import glob
import opticstools as ot
plt.ion()
# --- Simulation constants (lengths in mm, times in seconds) ---
mm_pix = 100
r_0_500 = 0.98*.5e-6/np.radians(0.86/3600)*1000 #In mm
v_wind = 9.4*1000 #9.4 for median seeing. Actually vbar
angle_wind = 0.1
#Fraction of wavefront that is corrected. In practice, this is spatial-frequency
#dependent, with a larger fraction corrected in e.g. tip/tilt modes. However,
#there is additional tip/tilt noise due to vibrations, so likely this isn't
#too far out. Lag multiplies a -5/3 power spectrum by an exponent of 2
#
#Note that for stability, the lag is generally higher by a factor of e.g. 1.5, due
#to the AO gain being decreased.
ao_correction_lag = 1.5*0.004*v_wind/mm_pix #in pix.
t_int = 0.005
# number of simulated frames
nt = 4096
# NOTE(review): rnoise is never used in the code shown below — confirm.
rnoise = 0.35
# wavelength in mm (2.2 microns, K band)
wl = 2.2e-3
sz = 512
#Comment out one of the blocks below.
#AT
subap_diam = 450.
tel_diam = 1800.
ao_correction_frac = 0.8
#UT
# NOTE(review): with both blocks active, these UT values silently override
# the AT values just above.
subap_diam = 1000.
tel_diam = 7900.
ao_correction_frac = 0.9
#Servo parameters
Ki = 0.6
tau = 0.003
Tint = 0.003
#For plotting... Gaussian smoothing kernel, normalized to unit sum.
g = np.exp(-np.arange(-15,15)**2/2./5**2)
g /= np.sum(g)
#-----------------------
def evolve(delay, time, v_wind, angle_wind, m_px):
    """Evolve atmospheric delays according the atmosphere and angle.

    Shifts the phase screen(s) by the distance the wind has moved in
    `time`, wrapping at the array edges (frozen-flow assumption).

    Parameters
    ----------
    delay : 2D ndarray, or iterable of 2D ndarrays
        Phase screen(s) to shift.
    time : float
        Elapsed time in seconds.
    v_wind : float
        Wind speed (same length unit as m_px, per second).
    angle_wind : float
        Wind direction in radians.
    m_px : float
        Pixel scale (length unit per pixel).
    """
    shift_px = v_wind * time / m_px
    # (y, x) offsets for scipy's axis ordering.
    offsets = (shift_px * np.sin(angle_wind), shift_px * np.cos(angle_wind))
    # Fix: nd.interpolation.shift lived in the scipy.ndimage.interpolation
    # namespace, which was removed in SciPy 1.10; nd.shift is the same
    # function under its public name.
    if len(delay.shape) == 2:
        return nd.shift(delay, offsets, order=1, mode='wrap')
    new_delays = np.empty_like(delay)
    for newd, d in zip(new_delays, delay):
        newd[:] = nd.shift(d, offsets, order=1, mode='wrap')
    return new_delays
#------------------
if __name__=="__main__":
    #Now create some wavefronts.
    delay = np.zeros( (sz, sz) )
    #Create a wavefront in mm.
    delay_unfiltered = ot.kmf(sz, r_0_pix=r_0_500/mm_pix)*.5e-3/2/np.pi
    #Pupil
    subap_pup = np.fft.fftshift(ot.utils.circle(sz, subap_diam/mm_pix, interp_edge=True))
    subap_pup_ft = np.fft.rfft2(subap_pup/np.sum(subap_pup))
    pup = np.fft.fftshift(ot.utils.circle(sz, tel_diam/mm_pix, interp_edge=True))
    pup_ft = np.fft.rfft2(pup/np.sum(pup))
    #Gaussian mode in pupil
    x = ((np.arange(sz) + sz//2) % sz) - sz//2
    xy = np.meshgrid(x,x)
    #FWHM equal to telescope diameter. Not sure if this is correct!
    gbeam = 0.5**( (xy[0]**2 + xy[1]**2)/0.5/(tel_diam/mm_pix)**2 )
    gbeam *= pup
    gbeam_ft = np.fft.rfft2(gbeam/np.sum(gbeam))
    #Now simulate the effect of the AO system. We should be left with
    #delay * pup + (1 - delay * subap)
    delay[:] = delay_unfiltered
    correction = np.fft.irfft2(np.fft.rfft2(delay_unfiltered)*subap_pup_ft)
    # NOTE(review): scipy.ndimage.interpolation was removed in SciPy >= 1.10;
    # nd.shift is the modern spelling of this call.
    correction = nd.interpolation.shift(correction, (ao_correction_lag,0),mode='wrap')
    delay[:] -= ao_correction_frac * correction
    #Subtract the convolution of the pupil with the delay
    delay_piston_free = delay_unfiltered - np.fft.irfft2(np.fft.rfft2(delay_unfiltered)*pup_ft)
    delay_filtered_piston_free = delay_unfiltered - np.fft.irfft2(np.fft.rfft2(delay_unfiltered)*gbeam_ft)
    #plt.figure(2)
    #plt.imshow(delay_piston_free)
    #plt.colorbar()
    #plt.pause(.01)
    #fiber_delays is the delay as measured in a fiber mode (e.g. Gravity)
    fiber_delays = []
    #gbeam delays is a Gaussian-weighted delay
    gbeam_delays = []
    strehls = []
    # Taper the first/last 16 samples to reduce spectral leakage in the FFTs.
    window = np.ones(nt)
    window[:16] *= (np.arange(16)+1)/16
    window[-16:] *= ((np.arange(16)+1)/16)[::-1]
    #Brute force loop...
    for t_ix, t in enumerate(np.arange(nt)*t_int):
        if (t_ix % 100 == 0):
            print("Done {:d} of {:d} frames".format(t_ix,nt))
        #Create the wavefront as a delay within a telescope
        new_delay = evolve(delay,t,v_wind, angle_wind, mm_pix)
        new_delay -= new_delay[0,0]
        new_delay_piston_free = evolve(delay_piston_free,t,v_wind, angle_wind, mm_pix)
        #Compute key parameters
        fiber_delays += [np.angle(np.sum(np.exp(2j*np.pi*new_delay/wl)*gbeam)/np.sum(gbeam))/2/np.pi*wl - np.sum(new_delay*gbeam)/np.sum(gbeam)]
        gbeam_delays += [np.sum(new_delay_piston_free*gbeam)/np.sum(gbeam)]
        strehls += [np.exp(-(np.std(new_delay[pup != 0])*1e6/2200*2*np.pi)**2)]
    fiber_delays = np.array(fiber_delays)
    gbeam_delays = np.array(gbeam_delays)
    strehls = np.array(strehls)
    print("Mean Strehl: {:5.3f}".format(np.mean(strehls)))
    #Lets look at these in the Fourier domain.
    ps_gbeam_delays = 2*np.convolve(np.abs(np.fft.rfft(gbeam_delays*window)**2),g, mode='same')
    #We know that summing the above and dividing by len(gbeam_delays)^2 gives the mean square.
    #Also, integrating should give the mean square.
    #Naieve integral: Trapz computes sum multplied by df=1/(t_int * nt)
    ps_gbeam_delays *= t_int*1e6 #Convert to microns
    ps_gbeam_delays /= nt
    # Drop the DC bin before plotting/integrating.
    ps_gbeam_delays = ps_gbeam_delays[1:]
    #Do the same for the fiber delays.
    ps_fiber_delays = 2*np.convolve(np.abs(np.fft.rfft(fiber_delays*window)**2),g, mode='same')
    ps_fiber_delays *= t_int*1e6
    ps_fiber_delays /= nt
    ps_fiber_delays = ps_fiber_delays[1:]
    # Frequency axis matching the non-DC power-spectrum bins.
    fs = (np.arange(nt//2)+1)/nt/t_int
    print("RMS gbeam delay (um): {:5.2f}".format(np.sqrt(np.trapz(ps_gbeam_delays, fs))))
    print("RMS gbeam delay (um, direct calc): {:5.2f}".format(np.std(gbeam_delays)*1e3))
    #Simulate a simple PID servo loop
    s = 2j*np.pi*fs
    G = Ki*np.exp(-tau*s)*(1-np.exp(-Tint*s))/Tint**2/s**2
    G[fs > .5/Tint] = 0
    error = np.abs(1/(1+G))
    print("RMS corrected gbeam delay (um): {:5.3f}".format(np.sqrt(np.trapz(ps_gbeam_delays*error**2, fs))))
    print("RMS fiber delay (um): {:5.3f}".format(np.std(fiber_delays)*1e3))
    plt.figure(1)
    plt.clf()
    plt.loglog(fs, ps_gbeam_delays, 'b-', label='Fiber Piston Definition')
    plt.loglog(fs, ps_gbeam_delays*error**2, 'b:', label='Corrected Fiber Piston Definition')
    plt.loglog(fs, ps_fiber_delays, 'g-', label='Third order phase (K fringe tracker)')
    plt.legend()
    plt.xlabel('Frequency (Hz)')
    plt.ylabel(r'Power ($\mu$m$^2$/Hz)')
    plt.axis([1e-2,1e2, 1e-11,1e3])
| {
"repo_name": "mikeireland/opticstools",
"path": "playground/piston_offsets.py",
"copies": "1",
"size": "6667",
"license": "mit",
"hash": -4567744046596762600,
"line_mean": 36.2458100559,
"line_max": 148,
"alpha_frac": 0.6356682166,
"autogenerated": false,
"ratio": 2.6861402095084608,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38218084261084606,
"avg_score": null,
"num_lines": null
} |
#assume price and category are given:
from PIL import Image
import sys
import math
import numpy
import json
import sys
import os
import web
import sqlite3 as sqlite
# set boundaries in query_padmapper
# from query_padmapper import MAX_LAT, MAX_LON, MIN_LAT, MIN_LON
APP_PATH = os.getcwd()
# Map bounding box (roughly the San Francisco Bay Area).
MIN_LAT = 37.3
MAX_LAT = 37.89
MIN_LON = -122.55
MAX_LON = -121.85
# change these to change how detailed the generated image is
# (1000x1000 is good, but very slow)
MAX_X=1000
MAX_Y=1000
DRAW_DOTS=True
# at what distance should we stop making predictions?
IGNORE_DIST=0.01
def pixel_to_ll(x,y):
    """Convert image pixel coordinates back to (lat, lon).

    Inverse of ll_to_pixel; prints a warning if the round trip drifts by
    more than one pixel (sanity check of the two mappings).
    """
    delta_lat = MAX_LAT-MIN_LAT
    delta_lon = MAX_LON-MIN_LON
    # x is lon, y is lat
    # 0,0 is MIN_LON, MAX_LAT
    x_frac = float(x)/MAX_X
    y_frac = float(y)/MAX_Y
    lon = MIN_LON + x_frac*delta_lon
    lat = MAX_LAT - y_frac*delta_lat
    # Round-trip self-check (Python 2 print statement).
    calc_x, calc_y = ll_to_pixel(lat, lon)
    if abs(calc_x-x) > 1 or abs(calc_y-y) > 1:
        print "Mismatch: %s, %s => %s %s" % (
            x,y, calc_x, calc_y)
    return lat, lon
def ll_to_pixel(lat,lon):
    """Convert a (lat, lon) pair to integer pixel coordinates on the image.

    (0, 0) maps to (MIN_LON, MAX_LAT): x grows eastward, y grows southward.
    """
    lat_span = MAX_LAT-MIN_LAT
    lon_span = MAX_LON-MIN_LON
    # x is lon, y is lat
    lon_frac = (lon-MIN_LON)/lon_span
    lat_frac = (lat-MIN_LAT)/lat_span
    pixel_x = int(lon_frac*MAX_X)
    pixel_y = int((1-lat_frac)*MAX_Y)
    return pixel_x, pixel_y
def load_prices(rawInputCategory): # her row u bir list olarak cekebiliyorum zaten = rawData each line = line query = rating, Query_price, bus_id, lat, lon
#con.commit yzman lazim
#her line data olacak sekilde cek
#verdigin kriter raw inputla aldigin olsun where category = rawInputCategory
category = rawInputCategory
conn = sqlite.connect(APP_PATH + '/data/yelpdb.sqlite')
cur = conn.cursor()
cur.execute('''SELECT rating, query_price, id, latitude, longitude FROM Business WHERE query_category = ?''', ('bars',))
rawData = cur.fetchall()
raw_ratings = []
seen = set()
for line in rawData:
# if not line[0].isdigit(): #rating yoksa
# continue
rating = line[0]
price = line[1]
business_id = line[2]
lat = line[3]
lon = line[4]
if business_id in seen:
continue
else:
seen.add(business_id)
rating, price = int(rating), int(price)
raw_ratings.append((price, rating, float(lat), float(lon)))
slope, y_intercept = linear_regression([(price, rating) for (price, rating, lat, lon) in raw_ratings])
print "slope =", slope
print "y intercept =", y_intercept
x_intercept = -(y_intercept)/slope
print "x intercept =", x_intercept
num_phantom_price = -x_intercept # positive now
ratings = [(rating / (price + num_phantom_price), lat, lon, price) for (price, rating, lat, lon) in raw_ratings]
return ratings, num_phantom_price
def linear_regression(pairs):
    """Least-squares fit of y = slope*x + intercept over (x, y) pairs.

    Returns (slope, intercept).
    """
    xs = [x for (x,y) in pairs]
    ys = [y for (x,y) in pairs]
    # Design matrix rows are [x, 1]. rcond=None silences the NumPy
    # FutureWarning for the legacy default and uses the modern
    # machine-precision cutoff.
    A = numpy.array([xs, numpy.ones(len(xs))])
    w = numpy.linalg.lstsq(A.T, ys, rcond=None)[0]
    return w[0], w[1]
def distance_squared(x1,y1,x2,y2):
    """Squared Euclidean distance between (x1, y1) and (x2, y2)."""
    dx = x1 - x2
    dy = y1 - y2
    return dx * dx + dy * dy
def distance(x1,y1,x2,y2):
    """Euclidean distance between (x1, y1) and (x2, y2)."""
    squared = distance_squared(x1, y1, x2, y2)
    return math.sqrt(squared)
# def greyscale(price):
# grey = int(256*float(price)/3000)
# return grey, grey, grey
def color(val, buckets):
    """Map a value to an RGB color given descending bucket thresholds.

    `buckets` holds thresholds in descending order; the first threshold
    that `val` exceeds selects the matching entry of an 18-color palette
    running from red (high) to blue (low).  None maps to transparent white.
    """
    if val is None:
        return (255,255,255,0)
    # 18-entry palette, hot to cold (one per bucket).
    palette = [(255, 0, 0),
               (255, 91, 0),
               (255, 127, 0),
               (255, 171, 0),
               (255, 208, 0),
               (255, 240, 0),
               (255, 255, 0),
               (218, 255, 0),
               (176, 255, 0),
               (128, 255, 0),
               (0, 255, 0),
               (0, 255, 255),
               (0, 240, 255),
               (0, 213, 255),
               (0, 171, 255),
               (0, 127, 255),
               (0, 86, 255),
               (0, 0, 255)]
    # Renamed loop variable: the original shadowed the function name.
    for threshold, rgb in zip(buckets, palette):
        if val > threshold:
            return rgb
    return palette[-1]
# Gaussian weighting parameters for spatial smoothing.
# NOTE(review): named "variance" but used like a standard deviation in the
# normalization below — confirm the intended kernel width.
gaussian_variance = IGNORE_DIST/2
gaussian_a = 1 / (gaussian_variance * math.sqrt(2 * math.pi))
gaussian_negative_inverse_twice_variance_squared = -1 / (2 * gaussian_variance * gaussian_variance)
def gaussian(ratings, lat, lon, ignore=None):
    """Gaussian-weighted average rating at (lat, lon).

    Returns None unless at least five data points contribute significant
    weight.  `ignore` is an optional (lat, lon) whose near-coincident
    points are excluded (used for leave-one-out prediction).
    """
    weighted_sum = 0
    total_weight = 0
    significant = 0
    for rating, plat, plon, _ in ratings:
        if ignore:
            ilat, ilon = ignore
            if distance_squared(plat, plon, ilat, ilon) < 0.0001:
                continue
        weight = gaussian_a * math.exp(distance_squared(lat,lon,plat,plon) *
                                       gaussian_negative_inverse_twice_variance_squared)
        weighted_sum += rating * weight
        total_weight += weight
        if weight > 2:
            significant += 1
    # don't display any averages that don't take into account at least
    # five data points with significant weight
    if significant < 5:
        return None
    return weighted_sum / total_weight
def start(rawInputCategory):  # give what was taken from the raw data: category and maybe city (translated)
    """Build a rating heatmap PNG plus a metadata JSON for one category."""
    print "loading data..."
    rated_points, num_phantom_price = load_prices([rawInputCategory])
    print "computing #price adjustments..."
    # compute what the error would be at each data point if we priced it without being able to take it into account
    # do this on a per-bedroom basis, so that we can compute correction factors
    price_categories = list(sorted(set(price for _, _, _, price in rated_points)))
    adjustments = {}
    for price_category in price_categories:
        print " price %s ..." % (price_category)
        total_actual = 0
        total_predicted = 0
        for i, (rating, plat, plon, price) in enumerate(rated_points):
            if price != price_category:
                continue
            # NOTE(review): x, y are computed here but never used in this loop.
            x, y = ll_to_pixel(plat, plon)
            # Leave-one-out prediction: ignore the point's own location.
            predicted_price = gaussian(rated_points, plat, plon, ignore=(plat, plon))
            if predicted_price:
                total_actual += price
                total_predicted += predicted_price
        if total_predicted == 0:
            # we might not make any predictions, if we don't have enough data
            adjustment = 1.0
        else:
            adjustment = total_actual / total_predicted
        adjustments[price_category] = adjustment
    print "rating all the points..."
    # Rate every pixel of the output image by its smoothed neighborhood.
    ratings = {}
    for x in range(MAX_X):
        print " %s/%s" % (x, MAX_X)
        for y in range(MAX_Y):
            lat, lon = pixel_to_ll(x, y)
            ratings[x,y] = gaussian(rated_points, lat, lon)
    # determine buckets
    # we want 18 buckets (17 divisions) of equal area
    all_rated_areas = [x for x in sorted(ratings.values()) if x is not None]
    total_rated_area = len(all_rated_areas)
    buckets = []
    divisions = 17.0
    # Fractional stride with error carry so bucket boundaries stay evenly
    # spaced even when the count is not divisible by 18.
    stride = total_rated_area / (divisions + 1)
    next_i = int(stride)
    error_i = stride - next_i
    for i, val in enumerate(all_rated_areas):
        if i == next_i:
            buckets.append(val)
            delta_i = stride + error_i
            next_i += int(delta_i)
            error_i = delta_i - int(delta_i)
    # color() expects descending thresholds.
    buckets.reverse()
    print "buckets: ", buckets
    # color regions by price
    I = Image.new('RGBA', (MAX_X, MAX_Y))  # modes: http://pillow.readthedocs.io/en/3.4.x/handbook/concepts.html#concept-modes
    IM = I.load()
    for x in range(MAX_X):
        for y in range(MAX_Y):
            IM[x,y] = color(ratings[x,y], buckets)
    if DRAW_DOTS:
        # Overlay one black pixel per data point.
        for _, lat, lon, _ in rated_points:
            x, y = ll_to_pixel(lat, lon)
            if 0 <= x < MAX_X and 0 <= y < MAX_Y:
                IM[x,y] = (0,0,0) #that is color - probably black for dots
    out_rawInputCategory = rawInputCategory + ".phantom." + str(MAX_X)
    I.save(out_rawInputCategory + ".png", "PNG")
    # Sidecar JSON so downstream tools can interpret the image.
    with open(out_rawInputCategory + ".metadata.json", "w") as outf:
        outf.write(json.dumps({
            "num_phantom_price": num_phantom_price,
            "buckets": buckets,
            "n": len(rated_points),
            "adjustments": adjustments}))
# Script entry point: expects the raw-data file/category name as argv[1].
if __name__ == "__main__":
    if len(sys.argv) != 2:
        print "usage: python draw_heatmap.py apts.txt"
    else:
        rawInputCategory = sys.argv[1]
        start(rawInputCategory)
| {
"repo_name": "selinerguncu/Yelp-Spatial-Analysis",
"path": "maps/pilMap.py",
"copies": "1",
"size": "8141",
"license": "mit",
"hash": 5808759682872090000,
"line_mean": 27.6654929577,
"line_max": 155,
"alpha_frac": 0.5912050117,
"autogenerated": false,
"ratio": 3.1900470219435735,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42812520336435733,
"avg_score": null,
"num_lines": null
} |
# Assume python 2.7
# Path to the interpreter is deliberately not mentioned.
import os
import errno
import sys
import optparse
import shlex
import subprocess
import threading
import getpass
from datetime import datetime
import json
def print_and_exit(rc, message):
    """Print message (when non-empty) and terminate with exit code rc.

    rc:      process exit status handed to sys.exit().
    message: text to show; an empty string prints nothing.
    """
    if len(message):
        # Parenthesized single-argument print is valid and behaves
        # identically on both Python 2 and Python 3.
        print(message)
    sys.exit(rc)
def is_windows():
    """True when running on Windows (NT sets the OS environment variable)."""
    return os.environ.get('OS') == 'Windows_NT'
def is_supported_os():
    """Return True on supported platforms; exit the process otherwise.

    Non-Windows systems are always supported. On Windows, USERPROFILE
    must be set (present on Vista and later).
    """
    if not is_windows() or os.getenv('USERPROFILE'):
        return True
    print_and_exit(1, 'Unsupported Windows version, should be Vista or higher')
    return False
# A really appealing option for crypto library:
# Stanford Javascript Crypto Library http://bitwiseshiftleft.github.io/sjcl/
# BSD or GPLv2 license
#
# For now, we use openssl
# http://how-to.linuxcareer.com/using-openssl-to-encrypt-messages-and-files
#
def encrypt(lines):
    """DES-ECB-encrypt `lines` via openssl; the key comes from $WIMP_PASS."""
    command = 'openssl enc -des-ecb -pass env:WIMP_PASS'
    return run_proc(lines, command)
def decrypt(encrypted_lines):
    """Decrypt DES-ECB data produced by encrypt(); key from $WIMP_PASS."""
    command = 'openssl enc -d -des-ecb -pass env:WIMP_PASS'
    return run_proc(encrypted_lines, command)
def hash_rsa(password):
    """SHA-1 digest of password via openssl (name is historical — not RSA).

    openssl appends a trailing newline, which is stripped before returning.
    """
    return run_proc(password, 'openssl dgst -sha1')[0:-1]
def pump_input(pipe, lines):
    """Write each chunk of `lines` into `pipe`, then close it.

    Runs on a worker thread (see run_proc) so the writer cannot deadlock
    against the reader on a full OS pipe buffer.
    """
    with pipe:
        for chunk in lines:
            pipe.write(chunk)
def run_proc(input_string, command):
    """Run `command`, feed it input_string on stdin, return its stdout.

    stdin is pumped from a separate thread (pump_input) so writer and
    reader cannot deadlock on full pipe buffers. `command` is split on
    whitespace, so arguments containing spaces are not supported.
    """
    p = subprocess.Popen(command.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=1)
    # The writer thread closes p.stdin when it has written everything.
    threading.Thread(target=pump_input, args=[p.stdin, input_string]).start()
    with p.stdout:
        output_string = p.stdout.read()
    # Reap the child so no zombie process is left behind.
    p.wait()
    return output_string
def run_pipe(cmds):
    """Run a list of shell commands as a pipeline; return the final stdout.

    cmds: list of command strings; each is tokenized with shlex.split.
    Exits the process (code 2) if the last stage reports stderr output.
    """
    # Assemble a pipe line:
    # - First stage in pipe: don't override stdin
    # - Mid/Last stages: wire ins and outs
    #
    # Last stage's stdout must be PIPE, otherwise communicate won't capture stdout
    pipe = []
    pipe.append(subprocess.Popen(shlex.split(cmds[0]), stdout=subprocess.PIPE))
    for i in range(1, len(cmds)):
        pipe.append(subprocess.Popen(shlex.split(cmds[i]), stdin=pipe[-1].stdout, stdout=subprocess.PIPE))
    # Allow pipe[i] to receive a SIGPIPE if pipe[i+1] exits. Do not apply this to the last proc in pipe !!!
    for p in pipe[0:-1]:
        p.stdout.close()
    (out, err) = pipe[-1].communicate()
    # NOTE(review): err is always None here because stderr was never piped,
    # so this branch can never fire as written — confirm intent.
    if err:
        print_and_exit(2, 'pipe error: ' + str(err))
    return out
def default_path():
    """Default password-store directory: %USERPROFILE%\\wimp on Windows,
    ~/.wimp elsewhere."""
    if is_windows():
        return os.path.join(os.getenv('USERPROFILE'), 'wimp')
    home = os.getenv('HOME')
    return os.path.join(home, '.wimp')
def cl_parse(argv):
    """Parse command-line options with optparse.

    Exits (code 3) unless exactly one mandatory action
    (--start/--end/--add/--update/--delete/--list) was supplied.
    Returns (options, leftover_args).
    """
    # NOTE(review): the help text claims AES, but encrypt() uses DES-ECB;
    # the stray "\ " before "New passwords" also looks like a typo — confirm.
    description="\
WIMP (Where Is My Password) password manager. Supports multiple tags, \
passwords history, AES encryption. \ New passwords are read from stdin. JSON \
is used for all options that take complex arguments. PASSWORD is formatted as \
a single JSON object, TAGS is formatted as a JSON list of strings. Unless '--\
path' is given, passwords are stored in $HOME/.wimp/ on Linux or in \
$USERPROFILE/wimp on Windows. With '--path' multiple password storages can be \
used. Each passwords storage is protected with a single master password. \
"
    usage="python %prog <--add|--update|--delete|--list_all> [options]"
    epilog='''
Example 1: Add new password
python wimp.py --add="{'title':'stackoverflow', 'username':'my_user_name', \\
'url'='http://stackoverflow.com/', 'tags':'fun'}"
Example 2: List all passwords tagged as 'fun' and/or 'work'
python wimp.py --list_by_tags="['fun','work']"
'''
    # dont strip new-lines from multiline epilog but just print it as-is
    # NOTE(review): this monkey-patches OptionParser globally, not only this
    # parser instance.
    optparse.OptionParser.format_epilog = lambda self, formatter: self.epilog
    parser = optparse.OptionParser(description=description, usage=usage,
                                   version="%prog 0.1", epilog=epilog)
    group = optparse.OptionGroup(parser, "Basics", "")
    group.add_option("--start", action="store_true", dest="start",
                     help="Start session")
    group.add_option("--end", action="store_true", dest="end",
                     help="End session")
    group.add_option("--add", dest="add_entry", metavar="{id:value, password:value, tags:value, other:value, ...}",
                     help="Add new password.")
    group.add_option("--update", dest="update_entry", metavar="{id:value, password:value, tags:value, other:value, ...}",
                     help="Update fields of existing entry")
    group.add_option("--delete", dest="delete_entry_id", metavar="id",
                     help="Delete existing entry")
    group.add_option("--list", action="store_true", dest="list_all",
                     help="List all passwords")
    parser.add_option_group(group)
    group = optparse.OptionGroup(parser, "Advanced", "")
    group.add_option("--list_by_tags", dest="list_by_tags", metavar="[TAGS]",
                     help="List passwords filtered by a list of tags")
    group.add_option("--list_tags", dest="list_tags", metavar="[TAGS]",
                     help="List tags")
    group.add_option("--echo", dest="echo_password", action="store_true",
                     help="Echo when typing a new password. By default this option is 'off'.")
    group.add_option("--path", dest="path", default=default_path(), metavar="PATH",
                     help="Path to passwords repository. If omitted, the default is " + default_path() + ".")
    parser.add_option_group(group)
    (opt, left_over_args) = parser.parse_args(argv)
    print opt
    # Check that exactly one option in this list is not None
    # TODO: mutually exclusive options and subcommands are supported in argparse package. Consider using it.
    mandatory_exclusives = [opt.start, opt.end, opt.add_entry, opt.update_entry, opt.delete_entry_id, opt.list_all]
    if len(mandatory_exclusives) - mandatory_exclusives.count(None) != 1:
        print_and_exit(3, usage)
    return (opt, left_over_args)
def get_timestamp():
    """Current local time formatted as 'yy-mm-dd_HH:MM:SS'."""
    return "{:%y-%m-%d_%H:%M:%S}".format(datetime.now())
def make_sure_path_exists(path):
    """Create directory `path` (with parents); a pre-existing path is fine."""
    try:
        os.makedirs(path)
    except OSError as err:
        # Only "already exists" is benign; anything else propagates.
        if err.errno != errno.EEXIST:
            raise
def master_password_new(path, master_hash_file):
    """Initialize a new repo: prompt twice for a master password, store its
    SHA-1 hash in master_hash_file, and return the password.

    Exits (code 5) on an empty password or mismatched confirmation.
    """
    print 'Initializing wimp repo in', path
    make_sure_path_exists(path)
    password = getpass.getpass('New master password:')
    if not password:
        print_and_exit(5, "Empty string is not allowed")
        return None
    password2 = getpass.getpass('Confirm master password:')
    if password != password2:
        print_and_exit(5, "Passwords do not match")
        return None
    # Persist only the hash, never the password itself.
    with open(master_hash_file, 'w') as f:
        f.write(hash_rsa(password))
    print 'OK'
    return password
def master_password_verify(master_hash_file):
    """Prompt for the master password and compare its hash with the stored
    one; exit (code 5) on mismatch, return the password on success."""
    # ask user for master password
    password = getpass.getpass('Master password:')
    with open(master_hash_file) as f:
        fdata = f.read()
    if fdata != hash_rsa(password):
        print_and_exit(5, 'MISMATCH')
        return None
    print 'MATCH: OK'
    return password
# In-memory (decrypted) password database and the path of its encrypted
# on-disk file. Both are populated by init_repo()/db_load().
db_dict={}
db_path=""
def db_store():
    """Encrypt db_dict as JSON and write it to db_path, stamping 'laststore'."""
    db_dict['laststore'] = get_timestamp()
    with open(db_path, 'w') as f:
        # NOTE(review): encrypt() returns raw openssl output; on Python 3 or
        # Windows this would need binary mode — confirm before porting.
        f.write(encrypt(json.dumps(db_dict)))
def db_load():
    """Read db_path, decrypt, parse JSON into db_dict; stamp 'lastload'."""
    global db_dict
    with open(db_path) as f:
        db_dict = json.loads(decrypt(f.read()))
    db_dict['lastload'] = get_timestamp()
def init_repo(path):
    """Open (or create) the password repository at `path`.

    Verifies or creates the master password, exports it via WIMP_PASS for
    the openssl subprocesses, then loads or creates the encrypted DB.
    Returns 0.
    """
    global db_dict, db_path
    # Create repo if none exists
    # Create new or verify existing master password
    master_hash_file = os.path.join(path, 'master.hash')
    if os.path.exists(master_hash_file):
        password = master_password_verify(master_hash_file)
    else:
        password = master_password_new(path, master_hash_file)
    # NOTE(review): os.putenv does not update os.environ, but it does reach
    # child processes (openssl); assigning os.environ would be safer.
    os.putenv('WIMP_PASS', password)
    # DB: load if exists, or create new + store
    db_path = os.path.join(path, 'db.wimp')
    if os.path.isfile(db_path):
        db_load()
    else:
        db_dict['born'] = get_timestamp()
        db_store()
    return 0
def main(argv):
    """Program entry point: validate OS, parse options, open the repository."""
    is_supported_os()  # exits the process when unsupported
    options, _leftover = cl_parse(argv)
    init_repo(options.path)
    return 0
# Script entry point; the exit status propagates from main().
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| {
"repo_name": "alexeypolo/wimp",
"path": "wimp.py",
"copies": "1",
"size": "7996",
"license": "mit",
"hash": 9082865074527243000,
"line_mean": 33.0255319149,
"line_max": 121,
"alpha_frac": 0.6585792896,
"autogenerated": false,
"ratio": 3.524019391802556,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9645748231343692,
"avg_score": 0.0073700900117730185,
"num_lines": 235
} |
# assumes all letters are lowercase alphabetical (all uppercase should work too, but not mixed or any other characters or non-characters)
from bisect import bisect # binary search
'''
ALPHABET_LEN = ord('z') - ord('a') + 1
def dist_to_letter(target, letter):
if target == letter:
return ALPHABET_LEN # because it must be strictly "bigger", the target itself is furthest
return (ord(letter) - ord(target) + ALPHABET_LEN) % ALPHABET_LEN
'''
class Solution:
    """LeetCode 744 — Find Smallest Letter Greater Than Target.

    `letters` is a sorted list of lowercase characters. The answer wraps
    around: when no letter is strictly greater than `target`, the first
    letter is returned.
    """

    # O(log n) time, O(1) space. Letters are sorted alphabetically; the
    # wraparound only matters at the right end, handled by the fallback to
    # letters[0]. (The unused O(n) draft and its helper were removed.)
    def nextGreatestLetter(self, letters, target):
        """
        :type letters: List[str]
        :type target: str
        :rtype: str
        """
        # bisect (bisect_right) returns the index just past every occurrence
        # of target — i.e. the first strictly greater letter; an index equal
        # to len(letters) wraps around to the start.
        i = bisect(letters, target)
        return letters[0] if i >= len(letters) else letters[i]
"repo_name": "SelvorWhim/competitive",
"path": "LeetCode/FindSmallestLetterGreaterThanTarget.py",
"copies": "1",
"size": "1758",
"license": "unlicense",
"hash": 3601526609144449500,
"line_mean": 46.5405405405,
"line_max": 250,
"alpha_frac": 0.6587030717,
"autogenerated": false,
"ratio": 3.796976241900648,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9910063914434042,
"avg_score": 0.009123079833321416,
"num_lines": 37
} |
# assumes credential file in .aws folder in user directory
import json
import os
import boto3
import metadata
s3 = boto3.resource('s3')
def main():
    """Driver for the canal-photos bucket utilities.

    Only the metadata dump runs by default; the upload/download/delete
    calls are kept commented out — uncomment the one you need.
    """
    bucket_name = 'canal-photos-chester'
    bucket_region = 'us-west-1'
    bucket = s3.Bucket(bucket_name)
    photo_directory = r'C:\_webdev\maps\canal-map-photos\canal-photos-chester\upload'
    # upload_photos(photo_directory, bucket)
    # download_photos(photo_directory, bucket)
    # delete_all_keys(bucket)
    metadata_from_bucket(bucket, bucket_region)
def metadata_from_bucket(bucket, region):
    """Print a JSON document describing every 'popup' photo in the bucket.

    Each row carries the S3 object's user metadata (lat/lng/caption/
    datetime/width/height) plus public URLs for the popup image and its
    thumbnail counterpart.
    """
    data = {'rows': []}
    rows = []
    client = boto3.client('s3', region)
    for key in bucket.objects.filter(Prefix='popup'):
        obj = key.Object()
        url = "{}/{}/{}".format(
            client.meta.endpoint_url, bucket.name, key.key
        )
        # Thumbnails live under the same key with 'popup' -> 'thumbnail'.
        thumbnail_url = url.replace("popup", "thumbnail")
        d = {
            'lat': obj.metadata['lat'],
            'lng': obj.metadata['lng'],
            'caption': obj.metadata['caption'],
            'datetime': obj.metadata['datetime'],
            'width': obj.metadata['width'],
            'height': obj.metadata['height'],
            'thumbnail': thumbnail_url,
            'url': url,
            'video': '',
        }
        rows.append(d)
    data['rows'] = rows
    print(json.dumps(data))
def upload_photos(photo_directory, bucket):
    """Upload a directory of files to an s3 bucket. Check for existence
    of key before uploading each file."""
    photos = [
        os.path.join(photo_directory, f)
        for f in os.listdir(photo_directory)
        if f.endswith(('jpg', 'jpeg', 'JPG', 'JPEG'))
    ]
    keys = [obj.key for obj in bucket.objects.all()]
    for photo in photos:
        photo_name = os.path.basename(photo)
        if photo_name not in keys:
            photo_metadata = metadata.load_metadata(photo)
            # `with` closes the file handle after the upload; the original
            # left every handle open, leaking one descriptor per photo.
            with open(photo, 'rb') as data:
                bucket.put_object(
                    ACL='public-read',
                    Key=photo_name,
                    Metadata=photo_metadata,
                    Body=data,
                )
            print("{} uploaded".format(photo_name))
    size = sum(1 for _ in bucket.objects.all())
    print("{} contains {} photos".format(bucket.name, size))
def download_photos(photo_directory, bucket):
    """Download every bucket object missing locally into photo_directory.

    Only files already present locally with a jpg/jpeg extension count as
    "already downloaded".
    """
    local_photos = {
        name for name in os.listdir(photo_directory)
        if name.endswith(('jpg', 'jpeg', 'JPG', 'JPEG'))
    }
    for obj in bucket.objects.all():
        if obj.key in local_photos:
            continue
        target = os.path.join(photo_directory, obj.key)
        bucket.download_file(obj.key, target)
        print("{} downloaded".format(obj.key))
def delete_all_keys(bucket):
    """Delete every object in the bucket, logging each removed key."""
    for obj in bucket.objects.all():
        obj.delete()
        print("{} deleted".format(obj.key))
# Script entry point: runs the default bucket operation (see main()).
if __name__ == '__main__':
    main()
| {
"repo_name": "gdmf/canal-map",
"path": "utils/boto_ops.py",
"copies": "1",
"size": "3046",
"license": "unlicense",
"hash": -4041823105450446300,
"line_mean": 27.2037037037,
"line_max": 85,
"alpha_frac": 0.5778069599,
"autogenerated": false,
"ratio": 3.798004987531172,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48758119474311723,
"avg_score": null,
"num_lines": null
} |
# Assumes DB Drivers are installed correctly
import json
import requests
import pyodbc
from bs4 import BeautifulSoup
from random import shuffle
# SQL Server Example
# db007 = {
# "Driver": "{SQL Server}",
# "Server": "server.domain.site.com",
# "Database": "DatabaseName",
# "Trusted_Connection": "yes"
# }
#MySQL example -- requires listed driver to be installed
# data001 = {
# "Driver": "{MySQL ODBC 5.3 Unicode Driver}",
# "Server": "server.domain.site.com'",
# "Database": "databasename",
# "UID": "uid",
# "PASSWORD": "password"
# }
# Local SQL Server example
# Connection settings expanded into pyodbc.connect(**db007) below.
db007 = {
    "Driver": "{SQL Server}",
    "Server": "localhost",
    "Database": "DatabaseName",
    "Trusted_Connection": "yes"
}
def get_market_list():
    """ Query a database for a set of URLs and return those in a randomly sorted list """
    # Establish a database connection and create a cursor
    db_connection = pyodbc.connect(**db007)
    try:
        db_cursor = db_connection.cursor()
        # Collect the pending (status = 0) URIs from the database table.
        markets = [row.uri for row in
                   db_cursor.execute("select uri from dbo.URIList where status = 0")]
    finally:
        # Close even when the query raises; the original leaked the
        # connection on any exception.
        db_connection.close()
    # Randomly sort that list before returning it
    shuffle(markets)
    return markets
def get_web_page(market):
    """ For a single market, scrape the HTML, removing all javascript and style elements. Returns all the visible text on the web page """
    response = requests.get(market)
    soup = BeautifulSoup(response.content.decode("utf-8"))
    # Strip scripts, styles and anchors so only visible prose remains.
    for element in soup.find_all(['script', 'style', 'a']):
        element.extract()
    return soup.get_text().replace('\n', '\n\n')
# Get list of all URLs we want to scrape
markets = get_market_list()
# Open a persistent DB connection
db_connection = pyodbc.connect(**db007)
db_cursor = db_connection.cursor()
# For each URL in the list of markets, retrieve the text contents and write to a database
for market in markets:
    htmltext = get_web_page(market)
    print(market)
    # Mark the URI as done and store its scraped text in one statement.
    db_cursor.execute("update dbo.URIList set status = 1, htmltext = ? where uri = ?", htmltext, market)
    db_cursor.commit()
# NOTE(review): only the cursor is closed here; the connection stays open
# until interpreter exit — confirm whether that is intended.
db_cursor.close()
| {
"repo_name": "jebstone/examples",
"path": "very_basic_scraper_with_results_to_db.py",
"copies": "1",
"size": "2149",
"license": "unlicense",
"hash": -3143858695658119700,
"line_mean": 23.988372093,
"line_max": 135,
"alpha_frac": 0.6863657515,
"autogenerated": false,
"ratio": 3.285932721712538,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44722984732125376,
"avg_score": null,
"num_lines": null
} |
# Assumes dict d of information about found citations is given
# Need to marry these functions with whichever keys are options
# This really just gives some of the basic logic for how to display the pulled data
# Sample inputs
# d = {'citations': [{'citation_date': '2015-03-09', 'citation_number': 789674515, 'last_name': 'Phillips', 'violations': [{'violation_number': 123455, 'violation_description': 'public intoxication'}, {'violation_number': 333333, 'violation_description': 'indecent exposure'}], 'court_location': 'COUNTRY CLUB HILLS', 'court_address': '7422 Eunice Avenue', 'court_date': '2015-11-06', 'first_name': 'Wanda'}, {'citation_date': '2015-09-16', 'citation_number': 513276502, 'court_date': '2016-01-03', 'violations': [{'violation_number': 343343, 'violation_description': 'trespassing'}], 'court_location': 'FLORISSANT', 'court_address': '315 Howdershell Road', 'first_name': 'William', 'last_name': 'Ferrell'}]}
# c = {'court_location': 'COUNTRY CLUB HILLS', 'court_address': '7422 Eunice Avenue', 'phone': '314-555-5555'}
# w = {'warrants': [{'warrant_number': '12345678-A', 'zip_code': '63139'}, {'warrant_number': '98765432-X', 'zip_code': '63101'}]}
def notBlank(s):
    """Return s, substituting 'unavailable' for the empty string."""
    return 'unavailable' if s == '' else s
# Pass key and dict of info; modify according to options available to user.
def printKey(key, d):
    """Print one labeled field from info dict d, phrasing it by key type."""
    if key == 'phone':
        print("The phone number is %s." % d[key])
        return
    label = key.replace('_', ' ')
    # "Your ..." for fields that belong to the person; "The ..." otherwise.
    if key in ('court_date', 'court_location', 'violation_description', 'violation_number'):
        print("Your %s is %s." % (label, notBlank(d[key])))
    else:
        print("The %s is %s." % (label, notBlank(d[key])))
# Ticket Lookup modeled after web interface.
def listCitations(d):
    """Print a numbered summary line for every citation in d['citations']."""
    citations = d['citations']
    print("%d citation(s) found" % len(citations))
    for idx, cit in enumerate(citations):
        print("%d: Cit# %d on %s to %s %s"
              % (idx, cit['citation_number'], cit['citation_date'],
                 cit['first_name'], cit['last_name']))
# Get more info on a chosen citation; indCit is the index selected by the
# user, e.g. getCitation(0, d).
def getCitation(indCit, d):
    """Print court details for the citation at index indCit in d."""
    cited = d['citations'][indCit]
    print("Citation # %d for %d violations"
          % (cited['citation_number'], len(cited['violations'])))
    # Court fields to display, in order.
    for key in ('court_date', 'court_location', 'court_address'):
        printKey(key, cited)
    # Prompt user for violations?
# v is the list of violations attached to the user's chosen citation,
# i.e. d['citations'][indCit]['violations'].
def getViolations(v):
    """Print number and description for each violation in list v."""
    for ordinal, violation in enumerate(v, 1):
        print('Violation %d: %d' % (ordinal, violation['violation_number']))
        print('For: %s \n' % violation['violation_description'])
# Court Lookup modeled after web interface. Assumes the court was found and
# dict d has all the court info (see sample `c` at the top of this file).
def courtLookup(d):
    """Print location, address and phone for court info dict d."""
    for key in ('court_location', 'court_address', 'phone'):
        printKey(key, d)
# Warrant lookup modeled after the web interface; assumes a match was found
# and dict w carries all warrant info.
def getWarrants(w):
    """Print a numbered summary of each warrant in w['warrants']."""
    warrants = w['warrants']
    print("%d warrant(s) found" % len(warrants))
    for idx, warrant in enumerate(warrants):
        print("%d: Warrant Case # %s for ticket issued in %s"
              % (idx, warrant['warrant_number'], warrant['zip_code']))
"repo_name": "xHeliotrope/injustice_dropper",
"path": "parseDict.py",
"copies": "1",
"size": "3890",
"license": "mit",
"hash": 5980209397053860000,
"line_mean": 49.5324675325,
"line_max": 708,
"alpha_frac": 0.6827763496,
"autogenerated": false,
"ratio": 2.958174904942966,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4140951254542966,
"avg_score": null,
"num_lines": null
} |
# Assume `s` is a string of lower case characters.
# Write a program that prints the longest substring of `s` in which the letters occur in alphabetical order.
# For example, if `s` = 'azcbobobegghakl', then your program should print:
# "Longest substring in alphabetical order is: beggh"
# In the case of ties, print the first substring. For example, if s = 'abcbcd', then your program should print:
# "Longest substring in alphabetical order is: abc"
# Setup
# s = 'azcbobobegghakl' # "Longest substring in alphabetical order is: beggh"
s = 'abcbcd' # "Longest substring in alphabetical order is: abc"
# Code
# Scan from every start index; extend while characters stay non-decreasing.
# The strict '<' comparison keeps the FIRST longest run on ties.
i = 0
longest_substring_start = 0
longest_substring_end = 0
while i < len(s):
    start_substring = i
    end_substring = i
    while end_substring < len(s) - 1:
        if ord(s[end_substring]) <= ord(s[end_substring + 1]):
            end_substring += 1
        else:
            break
    if longest_substring_end - longest_substring_start < end_substring - start_substring:
        longest_substring_start = start_substring
        longest_substring_end = end_substring
    i += 1
# Single-argument parenthesized print behaves identically on Python 2 and 3
# (the original bare print statement was Python-2-only).
print("Longest substring in alphabetical order is: " + s[longest_substring_start:longest_substring_end + 1])
"repo_name": "FylmTM/edX-code",
"path": "MITx_6.00.1x/problem_set_1/3_alphabeticals_substrings.py",
"copies": "1",
"size": "1218",
"license": "mit",
"hash": 5101748456004198000,
"line_mean": 39.6333333333,
"line_max": 111,
"alpha_frac": 0.6863711002,
"autogenerated": false,
"ratio": 3.318801089918256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4505172190118256,
"avg_score": null,
"num_lines": null
} |
# Assumes null_sym_source/target = 0
# unk_sym_source/target = 1
# Verify what is missing from each dict
# How was it saved (0,-1)
##For full vocab
# iv : iv[0] = '<\s>'
# iv : iv[1] : KeyError
## v : v['<\s>'] = 0
## v : v['<s>'] = 0
### len(v) = len(iv) + 1
### Use *_sym_* in state to specify <UNK> and <EOS>
import cPickle
import os
import argparse
# Command-line interface: file locations come from the pickled "state"
# dict, which maps names like 'indx_word' to vocabulary pickle paths.
parser = argparse.ArgumentParser()
parser.add_argument("--state", type=str,
                    help="Needed only for vocabulary paths")
parser.add_argument("--source-file", type=str,
                    help="Text to translate")
parser.add_argument("--num-common", type=int)
parser.add_argument("--num-ttables", type=int)
parser.add_argument("--topn-file", type=str,
                    help="With old indices")
parser.add_argument("--ext", type=str,
                    help="*.pkl -> *.ext.pkl")
parser.add_argument("--save-vocab-dir", type=str)
parser.add_argument("--new-state", action="store_true", default=False)
parser.add_argument("--new-topn", action="store_true", default=False)
args = parser.parse_args()
# Load the experiment state and the four existing vocabulary mappings
# (source/target, index->word and word->index).
with open(args.state, 'rb') as f:
    d = cPickle.load(f)
with open(d['indx_word'], 'rb') as f:
    old_src_i2w = cPickle.load(f)
with open(d['word_indx'], 'rb') as f:
    old_src_w2i = cPickle.load(f)
with open(d['indx_word_target'], 'rb') as f:
    old_trg_i2w = cPickle.load(f)
with open(d['word_indx_trgt'], 'rb') as f:
    old_trg_w2i = cPickle.load(f)
# topn maps a source index to its candidate target indices; keep only the
# first num_ttables candidates per source word.
with open(args.topn_file, 'rb') as f:
    topn = cPickle.load(f)
for elt in topn:
    topn[elt] = topn[elt][:args.num_ttables]
# Build the filtered vocabularies. Index 0 is the sentence delimiter on
# both sides; both '<s>' and '</s>' map to it (see notes at file top).
src_i2w = {}
src_w2i = {}
trg_i2w = {}
trg_w2i = {}
src_i2w[0] = '<\s>'
src_w2i['<s>'] = 0
src_w2i['</s>'] = 0
trg_i2w[0] = '<\s>'
trg_w2i['<s>'] = 0
trg_w2i['</s>'] = 0
# Fill common target words
for i in xrange(2, args.num_common):
    trg_i2w[i] = old_trg_i2w[i]
    trg_w2i[trg_i2w[i]] = i
# Index 1 is reserved for <UNK> on both sides, hence starting at 2.
cur_src_index = 2
cur_trg_index = args.num_common
# Scan the text to translate: each in-vocabulary source word gets a new
# source index, and its topn candidate target words get target indices.
with open(args.source_file) as f:
    for line in f:
        line = line.strip().split()
        for word in line:
            if (old_src_w2i.get(word, d['n_sym_source']) < d['n_sym_source']) and (word not in src_w2i):
                src_w2i[word] = cur_src_index
                src_i2w[cur_src_index] = word
                cur_src_index += 1
                target_old_indices = topn[old_src_w2i[word]]
                for index in target_old_indices: # Should always be < d['n_sym_target']
                    trg_word = old_trg_i2w[index]
                    if trg_word not in trg_w2i:
                        trg_w2i[trg_word] = cur_trg_index
                        trg_i2w[cur_trg_index] = trg_word
                        cur_trg_index += 1
# w2i was saved with highest pickle protocol, but not i2w
# Do the same here?
if not args.save_vocab_dir:
    save_vocab_dir = os.path.dirname(d['indx_word'])
else:
    save_vocab_dir = args.save_vocab_dir
# Write the four filtered vocabularies next to the originals, inserting
# args.ext before the .pkl suffix (i2w with protocol 0, w2i with highest).
with open(os.path.join(save_vocab_dir, os.path.basename(d['indx_word'])[:-3] + args.ext + '.pkl'), 'w') as f:
    cPickle.dump(src_i2w, f, 0)
with open(os.path.join(save_vocab_dir, os.path.basename(d['word_indx'])[:-3] + args.ext + '.pkl'), 'wb') as f:
    cPickle.dump(src_w2i, f, -1)
with open(os.path.join(save_vocab_dir, os.path.basename(d['indx_word_target'])[:-3] + args.ext + '.pkl'), 'w') as f:
    cPickle.dump(trg_i2w, f, 0)
with open(os.path.join(save_vocab_dir, os.path.basename(d['word_indx_trgt'])[:-3] + args.ext + '.pkl'), 'wb') as f:
    cPickle.dump(trg_w2i, f, -1)
if args.new_state:
    # Re-point the state at the new vocab files and update the vocab sizes.
    d['indx_word'] = os.path.join(save_vocab_dir, os.path.basename(d['indx_word'])[:-3] + args.ext + '.pkl')
    d['word_indx'] = os.path.join(save_vocab_dir, os.path.basename(d['word_indx'])[:-3] + args.ext + '.pkl')
    d['indx_word_target'] = os.path.join(save_vocab_dir, os.path.basename(d['indx_word_target'])[:-3] + args.ext + '.pkl')
    d['word_indx_trgt'] = os.path.join(save_vocab_dir, os.path.basename(d['word_indx_trgt'])[:-3] + args.ext + '.pkl')
    d['n_sym_source'] = len(src_w2i)
    d['n_sym_target'] = len(trg_w2i)
    with open(args.state[:-3] + args.ext + '.pkl' , 'wb') as f:
        cPickle.dump(d, f, -1)
if args.new_topn:
    # Re-express topn in terms of the new source/target indices.
    new_topn = {}
    for i in xrange(2, len(src_w2i)):
        new_topn[i] = [trg_w2i[old_trg_i2w[index]] for index in topn[old_src_w2i[src_i2w[i]]]]
    with open(args.topn_file[:-3] + args.ext + '.pkl', 'wb') as f:
        cPickle.dump(new_topn, f, -1)
"repo_name": "vseledkin/LV_groundhog",
"path": "experiments/nmt/utils/filter_vocab.py",
"copies": "3",
"size": "4386",
"license": "bsd-3-clause",
"hash": -1012615645398388600,
"line_mean": 35.2561983471,
"line_max": 122,
"alpha_frac": 0.5820793434,
"autogenerated": false,
"ratio": 2.6153846153846154,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4697463958784615,
"avg_score": null,
"num_lines": null
} |
""" Assumes softplus activations for gaussian
"""
import tensorflow as tf
import numpy as np
def log_bernoulli(x, logits, eps=0.0, axis=-1):
    """Alias for log_bernoulli_with_logits (kept for API compatibility)."""
    return log_bernoulli_with_logits(x, logits, eps=eps, axis=axis)
def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1):
    """Bernoulli log-likelihood of x given logits, summed over `axis`.

    eps > 0 clips the logits to the range corresponding to probabilities
    in [eps, 1 - eps] for numerical stability.
    """
    if eps > 0.0:
        bound = np.log(1.0 - eps) - np.log(eps)
        logits = tf.clip_by_value(logits, -bound, bound,
                                  name='clipped_logit')
    neg_log_lik = tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x)
    return -tf.reduce_sum(neg_log_lik, axis)
def log_normal(x, mu, var, eps=0.0, axis=-1):
    """Diagonal-Gaussian log-density of x under N(mu, var), summed over axis.

    eps > 0 adds a floor to the variance for numerical stability.
    """
    if eps > 0.0:
        var = tf.add(var, eps, name='clipped_var')
    per_dim = tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var
    return -0.5 * tf.reduce_sum(per_dim, axis)
def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1):
    """KL(N(qm, qv) || N(pm, pv)) for diagonal Gaussians, summed over axis.

    eps > 0 adds a floor to both variances for numerical stability.
    """
    if eps > 0.0:
        qv = tf.add(qv, eps, name='clipped_var1')
        # Bug fix: the original computed tf.add(qv, eps) here, silently
        # replacing the prior variance pv with the clipped posterior qv.
        pv = tf.add(pv, eps, name='clipped_var2')
    # Bug fix: the original hard-coded axis=-1 in reduce_sum, ignoring the
    # `axis` parameter (default unchanged, so existing callers are unaffected).
    return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv +
                               tf.square(qm - pm) / pv - 1, axis=axis)
| {
"repo_name": "RuiShu/tensorbayes",
"path": "tensorbayes/distributions.py",
"copies": "1",
"size": "1081",
"license": "mit",
"hash": -2308511000460664300,
"line_mean": 36.275862069,
"line_max": 79,
"alpha_frac": 0.5642923219,
"autogenerated": false,
"ratio": 2.689054726368159,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3753347048268159,
"avg_score": null,
"num_lines": null
} |
# Assumes use of the SyncServer_SharedImages database.
from locust import HttpLocust, Locust, TaskSet, task
import json
import uuid
import random
# Load the two users' OAuth access tokens from a local JSON file.
with open('accessTokens.json') as json_file:
    tokens = json.load(json_file)
user1AccessToken = tokens["user1AccessToken"]
user2AccessToken = tokens["user2AccessToken"]
# Server-side user ids corresponding to the two tokens above.
user1Id = 1
user2Id = 2
# Test-population parameters; the sharing-group lists are filled in setup().
numberOfSharingGroupsPerUser = 5
numberOfCommonSharingGroups = 0 # across users
user1SharingGroups = []
user2SharingGroups = []
def headers(deviceUUID, userAccessToken):
    """HTTP headers identifying the device and authenticating the user."""
    request_headers = {}
    request_headers["X-token-type"] = "GoogleToken"
    request_headers["access_token"] = userAccessToken
    request_headers["SyncServer-Device-UUID"] = deviceUUID
    return request_headers
def makeParams(dict):
    """Serialize a mapping into a raw '?k=v&k2=v2' query string.

    No URL-encoding is applied; keys and values must already be strings.
    Returns '' for an empty mapping.
    """
    pairs = [key + "=" + value for key, value in dict.items()]
    return "?" + "&".join(pairs) if pairs else ""
# Returns an array of sharingGroupUUID's to which the user belongs.
def sharingGroupsForUser(indexResponse, userId):
    """List the sharingGroupUUID of every sharing group containing userId."""
    return [group["sharingGroupUUID"]
            for group in indexResponse["sharingGroups"]
            for member in group["sharingGroupUsers"]
            if member["userId"] == userId]
# Pass in two arrays.
def intersection(sharingGroups1, sharingGroup2):
    """Elements common to both lists (as a list; order not preserved)."""
    return list(set(sharingGroups1).intersection(sharingGroup2))
# Given set1 = {A, B, C}, set2 = {A, D, E}
# Want: {B, C, D, E}
def exclusion(list1, list2):
    """Symmetric difference: elements appearing in exactly one list."""
    # union minus intersection == symmetric difference (^)
    return list(set(list1) ^ set(list2))
# Given set1 = {A, B, C}, set2 = {A}
# Want: {B, C}
def difference(list1, list2):
    """Elements of list1 not present in list2 (set difference, as a list)."""
    return list(set(list1) - set(list2))
def sharingGroupForUser(userId):
    """Pick a random sharing group belonging to the given user."""
    groups = user1SharingGroups if userId == user1Id else user2SharingGroups
    return random.choice(groups)
def randomUser():
    """Pick one of the two configured user ids at random."""
    return random.choice([user1Id, user2Id])
def accessTokenForUser(userId):
    """Return the access token matching the given user id."""
    if userId == user1Id:
        return user1AccessToken
    return user2AccessToken
class MyTaskSet(TaskSet):
    """Locust task set driving the SyncServer REST API with two fixed users.

    setup() (run once) makes sure each user owns numberOfSharingGroupsPerUser
    sharing groups, and that numberOfCommonSharingGroups of them are shared
    between the two users.  The @task methods then exercise the Index,
    DownloadFile, UploadFile/DoneUploads and UploadDeletion endpoints with a
    randomly chosen user each time.
    """

    # This gets run once when the task set starts.
    def setup(self):
        global user1SharingGroups, user2SharingGroups
        response = self.generalIndex(user1AccessToken)
        user1SharingGroups = sharingGroupsForUser(response, user1Id)
        response = self.generalIndex(user2AccessToken)
        user2SharingGroups = sharingGroupsForUser(response, user2Id)
        commonSharingGroups = intersection(user1SharingGroups, user2SharingGroups)
        numberCommonNeeded = numberOfCommonSharingGroups - len(commonSharingGroups)
        if numberCommonNeeded < 0:
            numberCommonNeeded = 0
        # Top up user 1's sharing groups if the server has fewer than wanted.
        if len(user1SharingGroups) < numberOfSharingGroupsPerUser:
            print("Creating sharing groups for user1")
            additional = []
            for x in range(0, numberOfSharingGroupsPerUser - len(user1SharingGroups)):
                # Create a sharing group
                result = self.createSharingGroup(user1AccessToken)
                if result is None:
                    print("Could not create sharing groups for user1!")
                else:
                    additional.append(result)
            user1SharingGroups.extend(additional)
        # Same for user 2.
        if len(user2SharingGroups) < numberOfSharingGroupsPerUser:
            print("Creating sharing groups for user2")
            additional = []
            for x in range(0, numberOfSharingGroupsPerUser - len(user2SharingGroups)):
                # Create a sharing group
                result = self.createSharingGroup(user2AccessToken)
                if result is None:
                    # BUG FIX: this message previously said "user1".
                    print("Could not create sharing groups for user2!")
                else:
                    additional.append(result)
            user2SharingGroups.extend(additional)
        if numberCommonNeeded > 0:
            print("Inviting user 2 to sharing group(s)")
            user1Only = difference(user1SharingGroups, commonSharingGroups)
            # Assumes that the number of sharing groups per user > number common.
            for user1SharingGroup in user1Only:
                # Invite user 2 to this sharing group.
                sharingInvitation = self.createSharingInvitation(user1AccessToken, user1SharingGroup)
                if sharingInvitation is None:
                    print("Error creating sharing invitation")
                    exit()
                result = self.redeemSharingInvitation(user2AccessToken, sharingInvitation)
                if result is None:
                    print("Error redeeming sharing invitation")
                    exit()
                commonSharingGroups.append(user1SharingGroup)
                if len(commonSharingGroups) >= numberOfCommonSharingGroups:
                    break
        print("User 1 sharing groups: " + ' '.join(user1SharingGroups))
        print("User 2 sharing groups: " + ' '.join(user2SharingGroups))
        print("Common sharing groups: " + ' '.join(commonSharingGroups))

    # Returns the new sharingGroupUUID, or None if the request fails.
    def createSharingGroup(self, accessToken):
        newSharingGroupUUID = str(uuid.uuid1())
        params = makeParams({
            "sharingGroupUUID": newSharingGroupUUID
        })
        deviceUUID = str(uuid.uuid1())
        resp = self.client.post("/CreateSharingGroup/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on CreateSharingGroup POST")
            return None
        return newSharingGroupUUID

    # Returns sharingInvitationUUID, or None if the request fails.
    def createSharingInvitation(self, accessToken, sharingGroupUUID):
        params = makeParams({
            "sharingGroupUUID": sharingGroupUUID,
            "permission": "admin"
        })
        deviceUUID = str(uuid.uuid1())
        resp = self.client.post("/CreateSharingInvitation/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on CreateSharingInvitation POST")
            return None
        invitationResponse = json.loads(resp.text)
        sharingInvitationUUID = invitationResponse.get("sharingInvitationUUID")
        return sharingInvitationUUID

    # This needs to be executed by a different user than the creating user.
    # Returns sharingGroupUUID, or None if the request fails.
    def redeemSharingInvitation(self, accessToken, sharingInvitationUUID):
        params = makeParams({
            "sharingInvitationUUID": sharingInvitationUUID,
            "cloudFolderName": "Local.SharedImages.Folder"
        })
        deviceUUID = str(uuid.uuid1())
        resp = self.client.post("/RedeemSharingInvitation/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on RedeemSharingInvitation POST")
            return None
        redeemResponse = json.loads(resp.text)
        sharingGroupUUID = redeemResponse.get("sharingGroupUUID")
        return sharingGroupUUID

    # Returns the parsed Index response for all sharing groups, or None.
    def generalIndex(self, accessToken):
        deviceUUID = str(uuid.uuid1())
        resp = self.client.get("/Index/", headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on Index GET")
            return None
        return json.loads(resp.text)

    # Returns the parsed Index response for one sharing group, or None.
    def indexSharingGroup(self, userAccessToken, deviceUUID, sharingGroupUUID):
        params = makeParams({"sharingGroupUUID": sharingGroupUUID})
        resp = self.client.get("/Index/" + params, headers=headers(deviceUUID, userAccessToken))
        if resp.status_code not in range(200, 300):
            print("Error on Index GET")
            return None
        indexResponse = json.loads(resp.text)
        return indexResponse

    # Returns True iff operation works.  Retries (up to maxTries) when the
    # server reports a newer masterVersion.
    def doneUploads(self, accessToken, masterVersion, deviceUUID, sharingGroupUUID, numTries=0, maxTries=3):
        if numTries > maxTries:
            print("Error on DoneUploads: Exceeded number of retries")
            return False
        params = makeParams({
            "sharingGroupUUID": sharingGroupUUID,
            "masterVersion": str(masterVersion)
        })
        resp = self.client.post("/DoneUploads/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on DoneUploads POST")
            return False
        body = json.loads(resp.text)
        masterVersion = body.get("masterVersionUpdate")
        if masterVersion is not None:
            # Server moved on; retry with the updated masterVersion.
            return self.doneUploads(accessToken, masterVersion, deviceUUID, sharingGroupUUID, numTries + 1)
        return True

    # Returns the updated masterVersion if there is one or None.
    def getMasterVersionUpdateInHeader(self, resp):
        respParams = None
        if resp.headers.get("syncserver-message-params") is None:
            print("Error on UploadFile: No header params")
            return None
        respParams = resp.headers["syncserver-message-params"]
        respParamsJSON = json.loads(respParams)
        masterVersion = respParamsJSON.get("masterVersionUpdate")
        if masterVersion is not None:
            return masterVersion
        return None

    # Returns True iff successful.  Retries on masterVersion updates.
    def downloadFileAux(self, accessToken, deviceUUID, paramDict, masterVersion, numTries=0, maxTries=3):
        if numTries > maxTries:
            print("Error on DownloadFile: Exceeded number of retries")
            return False
        paramDict["masterVersion"] = str(masterVersion)
        params = makeParams(paramDict)
        resp = self.client.get("/DownloadFile/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on DownloadFile GET")
            return False
        masterVersion = self.getMasterVersionUpdateInHeader(resp)
        if masterVersion is not None:
            return self.downloadFileAux(accessToken, deviceUUID, paramDict, masterVersion, numTries + 1)
        return True

    # Returns working param dictionary iff successful; None if failure.
    def uploadFileWithRetries(self, accessToken, deviceUUID, data, paramDict, masterVersion, numTries=0, maxTries=3):
        if numTries > maxTries:
            print("Error on UploadFile: Exceeded number of retries")
            return None
        paramDict["masterVersion"] = str(masterVersion)
        params = makeParams(paramDict)
        resp = self.client.post("/UploadFile/" + params, data=data, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on UploadFile")
            return None
        masterVersion = self.getMasterVersionUpdateInHeader(resp)
        if masterVersion is not None:
            return self.uploadFileWithRetries(accessToken, deviceUUID, data, paramDict, masterVersion, numTries + 1)
        return paramDict

    # Return working upload dictionary if upload with DoneUploads works; None otherwise.
    def uploadFileAux(self, accessToken, deviceUUID, sharingGroupUUID):
        indexResponse = self.indexSharingGroup(accessToken, deviceUUID, sharingGroupUUID)
        if indexResponse is None:
            return None
        masterVersion = indexResponse["masterVersion"]
        fileUUID = str(uuid.uuid1())
        paramDict = {
            "fileUUID": fileUUID,
            "sharingGroupUUID": sharingGroupUUID,
            "fileVersion": "0",
            "mimeType": "image/jpeg",
            # Fixed checksum of the fixture image below.
            "checkSum": "6B5B722C95BC6D5A023B6236486EBB8C".lower()
        }
        data = None
        with open("IMG_2963.jpeg", "r") as f:
            data = f.read()
        uploadResult = self.uploadFileWithRetries(accessToken, deviceUUID, data, paramDict, masterVersion)
        if uploadResult is None:
            return None
        if self.doneUploads(accessToken, masterVersion, deviceUUID, sharingGroupUUID):
            return uploadResult
        else:
            return None

    # Return True iff succeeds (no DoneUploads)
    def deleteFileAux(self, accessToken, deviceUUID, paramDict, masterVersion, numTries=0, maxTries=3):
        if numTries > maxTries:
            print("Error on DeleteFile: Exceeded number of retries")
            return False
        paramDict["masterVersion"] = str(masterVersion)
        params = makeParams(paramDict)
        resp = self.client.delete("/UploadDeletion/" + params, headers=headers(deviceUUID, accessToken))
        if resp.status_code not in range(200, 300):
            print("Error on DeleteFile")
            return False
        body = json.loads(resp.text)
        masterVersion = body.get("masterVersionUpdate")
        if masterVersion is not None:
            return self.deleteFileAux(accessToken, deviceUUID, paramDict, masterVersion, numTries + 1)
        return True

    # Download one not-deleted file from a random user's random sharing group.
    def downloadFileMain(self):
        deviceUUID = str(uuid.uuid1())
        userId = randomUser()
        sharingGroupUUID = sharingGroupForUser(userId)
        accessToken = accessTokenForUser(userId)
        indexResponse = self.indexSharingGroup(accessToken, deviceUUID, sharingGroupUUID)
        if indexResponse is None:
            print("Error: Could not get index for downloading.")
            return
        notDeleted = list(filter(lambda file: not file["deleted"], indexResponse["fileIndex"]))
        if len(notDeleted) == 0:
            print("Cannot download file: All files deleted")
            return
        exampleFile = notDeleted[0]
        masterVersion = indexResponse["masterVersion"]
        paramDict = {
            "sharingGroupUUID": sharingGroupUUID,
            "fileUUID": exampleFile["fileUUID"],
            "fileVersion": str(exampleFile["fileVersion"])
        }
        if exampleFile.get("appMetaDataVersion") is not None:
            paramDict["appMetaDataVersion"] = str(exampleFile["appMetaDataVersion"])
        if not self.downloadFileAux(accessToken, deviceUUID, paramDict, masterVersion):
            print("ERROR DownloadFile GET")
            return
        print("SUCCESS DownloadFile GET: User: " + str(userId))

    # Fetch the full index for a random user.
    def indexMain(self):
        userId = randomUser()
        accessToken = accessTokenForUser(userId)
        self.generalIndex(accessToken)
        print("SUCCESS on Index: User: " + str(userId))

    # Fetch the index of a random sharing group for a random user.
    def indexSharingGroupMain(self):
        deviceUUID = str(uuid.uuid1())
        userId = randomUser()
        sharingGroupUUID = sharingGroupForUser(userId)
        accessToken = accessTokenForUser(userId)
        indexResponse = self.indexSharingGroup(accessToken, deviceUUID, sharingGroupUUID)
        if indexResponse is None:
            print("Error: Could not get index for indexSharingGroupMain.")
            return

    # Upload the fixture image as a new file for a random user.
    def uploadFileMain(self):
        userId = randomUser()
        accessToken = accessTokenForUser(userId)
        sharingGroupUUID = sharingGroupForUser(userId)
        deviceUUID = str(uuid.uuid1())
        if self.uploadFileAux(accessToken, deviceUUID, sharingGroupUUID) is None:
            print("Error on UploadFile")
            return
        print("SUCCESS on UploadFile: User: " + str(userId))

    # Upload a file, then delete it again.
    def deleteFileMain(self):
        userId = randomUser()
        sharingGroupUUID = sharingGroupForUser(userId)
        accessToken = accessTokenForUser(userId)
        deviceUUID = str(uuid.uuid1())
        uploadResult = self.uploadFileAux(accessToken, deviceUUID, sharingGroupUUID)
        if uploadResult is None:
            print("Error on DeleteFile: Upload failed")
            return
        masterVersion = uploadResult["masterVersion"]
        # Need to +1 masterVersion because the value we have is after the DoneUploads with the upload.
        masterVersion = int(masterVersion)
        masterVersion += 1
        masterVersion = str(masterVersion)
        paramDict = {
            "fileUUID": uploadResult["fileUUID"],
            "sharingGroupUUID": uploadResult["sharingGroupUUID"],
            "fileVersion": uploadResult["fileVersion"]
        }
        if not self.deleteFileAux(accessToken, deviceUUID, paramDict, masterVersion):
            print("Error on DeleteFile")
            return
        if not self.doneUploads(accessToken, masterVersion, deviceUUID, sharingGroupUUID):
            print("Error on DoneUploads/DeleteFile")
            return
        print("SUCCESS on DeleteFile: User: " + str(userId))

    # Task weights: reads dominate, deletes are rare.
    @task(5)
    def index(self):
        self.indexMain()

    @task(5)
    def indexSharingGroupTask(self):
        self.indexSharingGroupMain()

    @task(3)
    def downloadFile(self):
        self.downloadFileMain()

    @task(2)
    def uploadFile(self):
        self.uploadFileMain()

    @task(1)
    def deleteFile(self):
        self.deleteFileMain()
class MyLocust(HttpLocust):
    """Simulated user: runs MyTaskSet against the host given on the CLI."""
    task_set = MyTaskSet
    # https://docs.locust.io/en/stable/writing-a-locustfile.html
    # These are the minimum and maximum time respectively, in milliseconds, that a simulated user will wait between executing each task.
    min_wait = 5000
    max_wait = 15000
| {
"repo_name": "crspybits/SyncServerII",
"path": "LoadTesting/locustfile.py",
"copies": "1",
"size": "17636",
"license": "mit",
"hash": -3190030205481814500,
"line_mean": 38.5426008969,
"line_max": 136,
"alpha_frac": 0.6421524155,
"autogenerated": false,
"ratio": 4.250662810315739,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5392815225815739,
"avg_score": null,
"num_lines": null
} |
#assumes you have gnu sed, osx sed might need a slight syntax change
#c.f. http://unix.stackexchange.com/questions/112023/how-can-i-replace-a-string-in-a-files
#written by shen-noether monero research labs
import os #for copying and sed etc.
import glob #for copy files
import textwrap #for comments etc
# Warn up front about the external tools this script shells out to.
print("make sure you have cat and grep installed")
print("also assumes gnu sed syntax, c.f. :http://unix.stackexchange.com/questions/112023/how-can-i-replace-a-string-in-a-files")
print("I believe osx may have slightly different version of sed")
print("maybe someone smart can replace the sed with perl..")

# User's menu answer; filled in by the prompt loop below.
# (Note: this name shadows the `license` builtin helper — harmless here.)
a = ""

# License header prepended to the generated crypto-ops.c.
license = textwrap.dedent("""\
// Copyright (c) 2014-2016, The Monero Project
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
// conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
// of conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
// used to endorse or promote products derived from this software without specific
// prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Parts of this file are originally copyright (c) 2012-2013 The Cryptonote developers
""")

# #include block emitted at the top of the generated crypto-ops.c.
crypto_ops_includes = textwrap.dedent("""\
#include <assert.h>
#include <stdint.h>
#include "warnings.h"
#include "crypto-ops.h"
DISABLE_VS_WARNINGS(4146 4244)
""")

# Forward declarations for functions whose definitions end up after their
# first use once the per-function files are concatenated.
predeclarations = textwrap.dedent("""\
/* Predeclarations */
static void fe_mul(fe, const fe, const fe);
static void fe_sq(fe, const fe);
static void fe_tobytes(unsigned char *, const fe);
static int fe_isnonzero(const fe);
static void ge_madd(ge_p1p1 *, const ge_p3 *, const ge_precomp *);
static void ge_msub(ge_p1p1 *, const ge_p3 *, const ge_precomp *);
static void ge_p2_0(ge_p2 *);
static void ge_p3_dbl(ge_p1p1 *, const ge_p3 *);
static void ge_sub(ge_p1p1 *, const ge_p3 *, const ge_cached *);
static void fe_divpowm1(fe, const fe, const fe);
""")

# Section banner comments inserted before each group of concatenated sources.
fe_comments = textwrap.dedent("""\
/*
fe means field element.
Here the field is \Z/(2^255-19).
An element t, entries t[0]...t[9], represents the integer
t[0]+2^26 t[1]+2^51 t[2]+2^77 t[3]+2^102 t[4]+...+2^230 t[9].
Bounds on each t[i] vary depending on context.
*/
""")

sc_comments = textwrap.dedent("""\
/*
*
* sc code
*
*
The set of scalars is \Z/l
where l = 2^252 + 27742317777372353535851937790883648493.
This is the order of the curve ed25519.
The point is that if a is a scalar and P is a point,
and b is congruent to a mod l, then aP = bP.
Thus, reducing mod l can possibly give you a smaller scalar,
so your elliptic curve operations take less time
*/
""")

ge_comments = textwrap.dedent("""\
/*
*
* ge code
*
*
ge means group element.
Here the group is the set of pairs (x,y) of field elements (see fe.h)
satisfying -x^2 + y^2 = 1 + d x^2y^2
where d = -121665/121666.
Representations:
ge_p2 (projective): (X:Y:Z) satisfying x=X/Z, y=Y/Z
ge_p3 (extended): (X:Y:Z:T) satisfying x=X/Z, y=Y/Z, XY=ZT
ge_p1p1 (completed): ((X:Z),(Y:T)) satisfying x=X/Z, y=Y/T
ge_precomp (Duif): (y+x,y-x,2dxy)
*/
""")

xmr_comments = textwrap.dedent("""\
/*
*
* xmr specific code
*
*
This code is from the original CryptoNote.
Some additional functions were needed to compute ring signatures
which are not needed for signing.
Note that sc_sub and sc_mulsub have been given their own file
since these have been rewritten
*/
""")
def qhasmToC(fi, header, out):
    # Inline the contents of `header` at its '#include "header"' line inside
    # file `fi`, strip lines mentioning 'qhasm' or ' asm', drop blank lines,
    # and write the result to `out`.  Shells out to GNU sed, grep and awk.
    out1 = out+".tmp"
    rem_qhasm = " |grep -v 'qhasm' |grep -v ' asm'"
    # sed block: on the matching #include line, read the header file in its
    # place ('r header') and delete the original include line ('d').
    com = "sed -e '/#include \""+header+"\"/ {' -e 'r "+header+"' -e 'd' -e '}' "+fi+rem_qhasm+" > "+out1
    # awk 'NF' keeps only lines with at least one field (i.e. non-blank).
    com2 = "awk 'NF' "+out1+" > "+out
    print(com)
    os.system(com)
    print(com2)
    os.system(com2)
    os.remove(out1) #temporary
# Prompt loop: keep asking until a recognised command is entered.
# BUG FIX: the original condition tested (a != "m") twice and never "q",
# so answering "q" (quit) looped forever.  With the fix, "q" exits the loop
# and falls through both `if` blocks below, doing nothing.
while (a != "m") and (a != "q") and (a != "c"):
    a = raw_input("Make / Clean/ Quit m / c / q?")

if a == "m":
    print("making crypto-ops.c and crypto-ops.h")
    #ref10 header files
    #ref10 c files
    #fe things
    #still need to do d2, d, sqrtm1
    print("making fe.c")
    print(fe_comments)
    fe = glob.glob("fe*.c")
    for g in fe:
        os.system("cp "+g+" "+g.replace("fe", "fe.monero."))
    qhasmToC("fe_pow22523.c", "pow22523.h", "fe.monero._pow22523.c")
    qhasmToC("fe_invert.c", "pow225521.h", "fe.monero._invert.c")
    os.system("rm fe.monero._isnonzero.c") #since it's modified, it's in xmrSpecificOld
    os.system("cat fe.monero.*.c | grep -v '^#include' > fe.monero.c")
    #sc things
    print("\nmaking sc.c")
    print(sc_comments)
    #so you don't get multiple "loads"
    os.system("tail -n +24 sc_reduce.c > sc.monero._reduce.c") #also good on linux
    os.system("tail -n +24 sc_muladd.c > sc.monero._muladd.c")
    os.system("tail -n +31 sc_sub.xmr.c > sc.monero._sub.xmr.c") #careful with the tails if you change these files!
    os.system("cat sc.monero.*.c | grep -v '^#include' > sc.monero.c")
    #ge stuff
    print("making ge.c")
    ge = glob.glob("ge*.c")
    for g in ge:
        os.system("cp "+g+" "+g.replace("ge", "ge.monero."))
    print(ge_comments)
    #need to substitute the below lines for their .h files in the appropriate places
    qhasmToC("ge_add.c", "ge_add.h", "ge.monero._add.c")
    qhasmToC("ge_madd.c", "ge_madd.h", "ge.monero._madd.c")
    qhasmToC("ge_sub.c", "ge_sub.h", "ge.monero._sub.c")
    qhasmToC("ge_msub.c", "ge_msub.h", "ge.monero._msub.c")
    qhasmToC("ge_p2_dbl.c", "ge_p2_dbl.h", "ge.monero._p2_dbl.c")
    qhasmToC("ge_frombytes.c", "d.h", "ge.monero._frombytes.c")
    qhasmToC("ge.monero._frombytes.c", "sqrtm1.h", "ge.monero._frombytes.c")
    qhasmToC("ge_p3_to_cached.c", "d2.h", "ge.monero._p3_to_cached.c")
    #also ge_double_scalarmult needs base2.h for ge_precomp Bi
    #note, base2.h is a large file!
    #also in ge_scalarmult_base ge_precomp base needs base.h included
    qhasmToC("ge_double_scalarmult.c", "base2.h", "ge.monero._double_scalarmult.c")
    qhasmToC("ge_scalarmult_base.c", "base.h", "ge.monero._scalarmult_base.c")
    #qhasmToC("ge.monero._scalarmult_base.c", "base.h", "ge.monero._scalarmult_base.c")
    # Rename the local `cmov` helper so it can't collide when concatenated.
    os.system("sed -i 's/ cmov/ ge_precomp_cmov/g' ge.monero._scalarmult_base.c")
    os.system("cat ge.monero.*.c | grep -v '^#include' > ge.monero.c")
    print("making crypto-ops.c")
    #sqrtm1 things
    #comments
    with open("fe.monero.comments", "w") as text_file:
        text_file.write(fe_comments)
    with open("ge.monero.comments", "w") as text_file:
        text_file.write(ge_comments)
    with open("sc.monero.comments", "w") as text_file:
        text_file.write(sc_comments)
    with open("xmr.monero.comments", "w") as text_file:
        text_file.write(xmr_comments)
    with open("xmr.monero.predeclarations", "w") as text_file:
        text_file.write(predeclarations)
    #license
    with open("monero.license", "w") as text_file:
        text_file.write(license)
    #crypto-ops.c includes
    with open("crypto-ops.monero.includes", "w") as text_file:
        text_file.write(crypto_ops_includes)
    #note you may have duplicates of load_3, load_4 and possibly some other functions ...
    os.system("cat monero.license crypto-ops.monero.includes xmr.monero.predeclarations fe.monero.comments fe.monero.c sc.monero.comments sc.monero.c ge.monero.comments ge.monero.c xmr.monero.comments xmrSpecificOld.c > crypto-ops.c")
    #monero specific header files
    #print("making crypto-ops-tmp.h")
    #os.system("cat fe.h ge.h sc.h |grep -v crypto_sign_ed25519 |grep -v fe.h > crypto-ops-tmp.h")
    #we'll just use the old header crypto-ops.h
    #replace crypto_ints
    os.system("sed -i 's/crypto_int32/int32_t/g' crypto-ops.c")
    os.system("sed -i 's/crypto_int64/int64_t/g' crypto-ops.c")
    os.system("sed -i 's/crypto_uint32/uint32_t/g' crypto-ops.c")
    os.system("sed -i 's/crypto_uint64/uint64_t/g' crypto-ops.c")
    #cleaning up
    os.system("rm *monero*")
    #monero specific c files

if a == "c":
    #turn the directory back into ref10
    os.system("rm *monero*")
    os.system("rm crypto-ops.c")
| {
"repo_name": "ranok/bitmonero",
"path": "src/crypto/crypto_ops_builder/ref10CommentedCombined/MakeCryptoOps.py",
"copies": "2",
"size": "9681",
"license": "bsd-3-clause",
"hash": 1343448710920573400,
"line_mean": 36.091954023,
"line_max": 234,
"alpha_frac": 0.64249561,
"autogenerated": false,
"ratio": 3.052980132450331,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46954757424503313,
"avg_score": null,
"num_lines": null
} |
# Assume you are given two dictionaries d1 and d2, each with integer keys and
# integer values. You are also given a function f, that takes in two integers,
# performs an unknown operation on them, and returns a value.
# Write a function called dict_interdiff that takes in two dictionaries (d1 and
# d2). The function will return a tuple of two dictionaries: a dictionary of
# the intersect of d1 and d2 and a dictionary of the difference of d1 and d2,
# calculated as follows:
# intersect: The keys to the intersect dictionary are keys that are common
# in both d1 and d2. To get the values of the intersect dictionary, look at
# the common keys in d1 and d2 and apply the function f to these keys' values
# -- the value of the common key in d1 is the first parameter to the function
# and the value of the common key in d2 is the second parameter to the
# function. Do not implement f inside your dict_interdiff code -- assume it is
# defined outside.
# difference: a key-value pair in the difference dictionary is (a) every
# key-value pair in d1 whose key appears only in d1 and not in d2 or (b) every
# key-value pair in d2 whose key appears only in d2 and not in d1.
# Here are two examples:
# If f(a, b) returns a + b
# d1 = {1:30, 2:20, 3:30, 5:80}
# d2 = {1:40, 2:50, 3:60, 4:70, 6:90}
# then dict_interdiff(d1, d2) returns ({1: 70, 2: 70, 3: 90}, {4: 70, 5: 80, 6: 90})
# If f(a, b) returns a > b
# d1 = {1:30, 2:20, 3:30}
# d2 = {1:40, 2:50, 3:60}
# then dict_interdiff(d1, d2) returns ({1: False, 2: False, 3: False}, {})
def dict_interdiff(d1, d2):
    '''
    d1, d2: dicts whose keys and values are integers
    Returns a tuple of dictionaries according to the instructions above
    '''
    # Keys in both dicts get f(d1-value, d2-value); `f` is supplied
    # externally, per the problem statement.
    intersect = {k: f(v, d2[k]) for k, v in d1.items() if k in d2}
    # Keys unique to either dict keep their original value.
    difference = {k: v for k, v in d1.items() if k not in d2}
    difference.update((k, v) for k, v in d2.items() if k not in d1)
    return (intersect, difference)
"repo_name": "emyarod/OSS",
"path": "1_intro/6.00.1x/Quiz/Problem 7.py",
"copies": "1",
"size": "2101",
"license": "mit",
"hash": -2661961493018195000,
"line_mean": 41.04,
"line_max": 88,
"alpha_frac": 0.6554021894,
"autogenerated": false,
"ratio": 3.2523219814241484,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9398659008161898,
"avg_score": 0.0018130325324501816,
"num_lines": 50
} |
## Assume you have run vr_kuka_setup and have default scene set up
# Require p.setInternalSimFlags(0) in kuka_setup
import pybullet as p
import math
# import numpy as np

p.connect(p.SHARED_MEMORY)

# Body unique ids — presumably the KUKA arm and its gripper as loaded by
# vr_kuka_setup; TODO confirm against that script's load order.
kuka = 3
kuka_gripper = 7
# Indices into a pybullet VR event tuple.
POSITION = 1
ORIENTATION = 2
ANALOG = 3
BUTTONS = 6
# Controller-to-end-effector distance gate (compared squared, see loop below).
THRESHOLD = .5
# IK configuration for the 7-DOF arm.
LOWER_LIMITS = [-.967, -2.0, -2.96, 0.19, -2.96, -2.09, -3.05]
UPPER_LIMITS = [.96, 2.0, 2.96, 2.29, 2.96, 2.09, 3.05]
JOINT_RANGE = [5.8, 4, 5.8, 4, 5.8, 4, 6]
REST_POSE = [0, 0, 0, math.pi / 2, 0, -math.pi * 0.66, 0]
JOINT_DAMP = [0.1] * 10
REST_JOINT_POS = [-0., -0., 0., 1.570793, 0., -1.036725, 0.000001]
MAX_FORCE = 500
# Canned open/closed gripper joint poses (not referenced in the loop below).
KUKA_GRIPPER_REST_POS = [0., -0.011130, -0.206421, 0.205143, -0.009999, 0., -0.010055, 0.]
KUKA_GRIPPER_CLOZ_POS = [
    0.0, -0.047564246423083795, 0.6855956234759611, -0.7479294372303137, 0.05054599996976922, 0.0,
    0.049838105678835724, 0.0
]
def euc_dist(posA, posB):
    """Squared Euclidean distance between two equal-length point sequences.

    Note: despite the name, the square root is NOT taken — callers compare
    the result against THRESHOLD squared.
    """
    dist = 0.
    for idx, coord in enumerate(posA):
        delta = coord - posB[idx]
        dist += delta * delta
    return dist
p.setRealTimeSimulation(1)

# Snapshot the VR controller ids present at startup; only the first is used.
controllers = [e[0] for e in p.getVREvents()]

# Dump gripper joint info once for debugging.
for j in range(p.getNumJoints(kuka_gripper)):
    print(p.getJointInfo(kuka_gripper, j))

while True:
    events = p.getVREvents()
    for e in (events):
        # Only use one controller
        ###########################################
        # This is important: make sure there's only one VR Controller!
        if e[0] == controllers[0]:
            break
    # NOTE(review): indentation reconstructed — the code below appears to use
    # the loop variable `e` leaked from the for-loop above (the break selects
    # the tracked controller's event); confirm against the upstream pybullet
    # example before relying on this structure.
    sq_len = euc_dist(p.getLinkState(kuka, 6)[0], e[POSITION])
    # A simplistic version of gripper control
    #@TO-DO: Add slider for the gripper
    #for i in range(p.getNumJoints(kuka_gripper)):
    i = 4
    p.setJointMotorControl2(kuka_gripper,
                            i,
                            p.POSITION_CONTROL,
                            targetPosition=e[ANALOG] * 0.05,
                            force=10)
    i = 6
    p.setJointMotorControl2(kuka_gripper,
                            i,
                            p.POSITION_CONTROL,
                            targetPosition=e[ANALOG] * 0.05,
                            force=10)
    if sq_len < THRESHOLD * THRESHOLD:
        # Controller is close enough to the end effector: track it via IK.
        eef_pos = e[POSITION]
        eef_orn = p.getQuaternionFromEuler([0, -math.pi, 0])
        joint_pos = p.calculateInverseKinematics(kuka,
                                                 6,
                                                 eef_pos,
                                                 eef_orn,
                                                 lowerLimits=LOWER_LIMITS,
                                                 upperLimits=UPPER_LIMITS,
                                                 jointRanges=JOINT_RANGE,
                                                 restPoses=REST_POSE,
                                                 jointDamping=JOINT_DAMP)
        for i in range(len(joint_pos)):
            p.setJointMotorControl2(kuka,
                                    i,
                                    p.POSITION_CONTROL,
                                    targetPosition=joint_pos[i],
                                    targetVelocity=0,
                                    positionGain=0.15,
                                    velocityGain=1.0,
                                    force=MAX_FORCE)
    else:
        # Set back to original rest pose
        for jointIndex in range(p.getNumJoints(kuka)):
            p.setJointMotorControl2(kuka, jointIndex, p.POSITION_CONTROL, REST_JOINT_POS[jointIndex],
                                    0)
| {
"repo_name": "MadManRises/Madgine",
"path": "shared/bullet3-2.89/examples/pybullet/examples/vr_kuka_control.py",
"copies": "2",
"size": "3423",
"license": "mit",
"hash": 8916091336469085000,
"line_mean": 32.8910891089,
"line_max": 98,
"alpha_frac": 0.4934268186,
"autogenerated": false,
"ratio": 3.26,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47534268185999995,
"avg_score": null,
"num_lines": null
} |
# assuming non-overlapping and initially sorted
class Solution:
def insert(self, intervals: List[List[int]], newInterval: List[int]) -> List[List[int]]:
found = False
# todo: optimize with binary search, since they're sorted
for i in range(len(intervals)):
# insert case:
if newInterval[1] < intervals[i][0]:
found = True
intervals.insert(i,newInterval)
break
# merge case:
if newInterval[0] <= intervals[i][1]:
found = True
intervals[i][0] = min(newInterval[0], intervals[i][0])
intervals[i][1] = max(newInterval[1], intervals[i][1])
# merge with interval(s) ahead too:
while (len(intervals) > i + 1) and (newInterval[1] >= intervals[i+1][0]):
intervals[i][1] = max(newInterval[1], intervals[i+1][1])
del intervals[i+1]
break
# insert last case:
if not found:
intervals.append(newInterval)
return intervals
| {
"repo_name": "SelvorWhim/competitive",
"path": "LeetCode/InsertInterval.py",
"copies": "1",
"size": "1145",
"license": "unlicense",
"hash": 878556082735027300,
"line_mean": 37.1666666667,
"line_max": 92,
"alpha_frac": 0.5065502183,
"autogenerated": false,
"ratio": 4.35361216730038,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.536016238560038,
"avg_score": null,
"num_lines": null
} |
# Assuming that we are given
#    rho rho_t rho_x rho_xx rho_xy rho_xz rho_y rho_yy rho_yz rho_z rho_zz
#    u u_t u_x u_xx u_xy u_xz u_y u_yy u_yz u_z u_zz
#    v v_t v_x v_xx v_xy v_xz v_y v_yy v_yz v_z v_zz
#    w w_t w_x w_xx w_xy w_xz w_y w_yy w_yz w_z w_zz
#    T T_t T_x T_xx T_xy T_xz T_y T_yy T_yz T_z T_zz
# and the coefficients
#    alpha beta gamma Ma Pr Re
# compute the source terms
#    Q_rho Q_rhou Q_rhov Q_rhow Q_rhoe
# necessary to force the solution rho, u, v, w, and T.
# (Method of manufactured solutions: this script is meant to be exec'd with
# all of the above names already bound — nothing is defined locally.)

# Computations stemming from the constitutive relationships
# Specific total energy and its derivatives (nondimensional ideal gas).
e   = T   / gamma / (gamma - 1) + Ma * Ma * (u*u   + v*v   + w*w  ) / 2
e_x = T_x / gamma / (gamma - 1) + Ma * Ma * (u*u_x + v*v_x + w*w_x)
e_y = T_y / gamma / (gamma - 1) + Ma * Ma * (u*u_y + v*v_y + w*w_y)
e_z = T_z / gamma / (gamma - 1) + Ma * Ma * (u*u_z + v*v_z + w*w_z)
e_t = T_t / gamma / (gamma - 1) + Ma * Ma * (u*u_t + v*v_t + w*w_t)
# Pressure from the nondimensional ideal gas law.
p   = (rho   * T              ) / gamma
p_x = (rho_x * T + rho * T_x) / gamma
p_y = (rho_y * T + rho * T_y) / gamma
p_z = (rho_z * T + rho * T_z) / gamma
# Power-law viscosity mu = T^beta.
mu   = pow(T, beta)
mu_x = beta * pow(T, beta - 1) * T_x
mu_y = beta * pow(T, beta - 1) * T_y
mu_z = beta * pow(T, beta - 1) * T_z
lambda_   = (3 * alpha - 2) * mu   / 3 # "lambda" is a Python keyword
lambda_x = (3 * alpha - 2) * mu_x / 3 # Strange order of operations allows...
lambda_y = (3 * alpha - 2) * mu_y / 3 # ...arbitrary precision library to...
lambda_z = (3 * alpha - 2) * mu_z / 3 # ...handle (alpha-2/3) factor correctly
# Heat flux (Fourier's law) and its divergence terms.
qx = - 1 / Re / Pr / (gamma - 1) * mu * T_x
qy = - 1 / Re / Pr / (gamma - 1) * mu * T_y
qz = - 1 / Re / Pr / (gamma - 1) * mu * T_z
qx_x = - 1 / Re / Pr / (gamma - 1) * (mu_x * T_x + mu * T_xx)
qy_y = - 1 / Re / Pr / (gamma - 1) * (mu_y * T_y + mu * T_yy)
qz_z = - 1 / Re / Pr / (gamma - 1) * (mu_z * T_z + mu * T_zz)

# Computations stemming from the compressible, Newtonian fluid model
# Conserved quantities and product-rule expansions of their derivatives.
rhou = rho * u
rhov = rho * v
rhow = rho * w
rhoe = rho * e
rhou_x = rho_x * u + rho * u_x
rhov_y = rho_y * v + rho * v_y
rhow_z = rho_z * w + rho * w_z
rhou_t = rho_t * u + rho * u_t
rhov_t = rho_t * v + rho * v_t
rhow_t = rho_t * w + rho * w_t
rhoe_t = rho_t * e + rho * e_t
rhouu_x = (rho_x * u * u) + (rho * u_x * u) + (rho * u * u_x)
rhouv_y = (rho_y * u * v) + (rho * u_y * v) + (rho * u * v_y)
rhouw_z = (rho_z * u * w) + (rho * u_z * w) + (rho * u * w_z)
rhouv_x = (rho_x * u * v) + (rho * u_x * v) + (rho * u * v_x)
rhovv_y = (rho_y * v * v) + (rho * v_y * v) + (rho * v * v_y)
rhovw_z = (rho_z * v * w) + (rho * v_z * w) + (rho * v * w_z)
rhouw_x = (rho_x * u * w) + (rho * u_x * w) + (rho * u * w_x)
rhovw_y = (rho_y * v * w) + (rho * v_y * w) + (rho * v * w_y)
rhoww_z = (rho_z * w * w) + (rho * w_z * w) + (rho * w * w_z)
rhoue_x = (rho_x * u * e) + (rho * u_x * e) + (rho * u * e_x)
rhove_y = (rho_y * v * e) + (rho * v_y * e) + (rho * v * e_y)
rhowe_z = (rho_z * w * e) + (rho * w_z * e) + (rho * w * e_z)
# Newtonian viscous stress tensor and its derivatives.
tauxx = mu * (u_x + u_x) + lambda_ * (u_x + v_y + w_z)
tauyy = mu * (v_y + v_y) + lambda_ * (u_x + v_y + w_z)
tauzz = mu * (w_z + w_z) + lambda_ * (u_x + v_y + w_z)
tauxy = mu * (u_y + v_x)
tauxz = mu * (u_z + w_x)
tauyz = mu * (v_z + w_y)
tauxx_x = ( mu_x * (u_x + u_x ) + lambda_x * (u_x + v_y + w_z )
          + mu * (u_xx + u_xx) + lambda_ * (u_xx + v_xy + w_xz) )
tauyy_y = ( mu_y * (v_y + v_y ) + lambda_y * (u_x + v_y + w_z )
          + mu * (v_yy + v_yy) + lambda_ * (u_xy + v_yy + w_yz) )
tauzz_z = ( mu_z * (w_z + w_z ) + lambda_z * (u_x + v_y + w_z )
          + mu * (w_zz + w_zz) + lambda_ * (u_xz + v_yz + w_zz) )
tauxy_x = mu_x * (u_y + v_x) + mu * (u_xy + v_xx)
tauxy_y = mu_y * (u_y + v_x) + mu * (u_yy + v_xy)
tauxz_x = mu_x * (u_z + w_x) + mu * (u_xz + w_xx)
tauxz_z = mu_z * (u_z + w_x) + mu * (u_zz + w_xz)
tauyz_y = mu_y * (v_z + w_y) + mu * (v_yz + w_yy)
tauyz_z = mu_z * (v_z + w_y) + mu * (v_zz + w_yz)
# Pressure-work and viscous-work derivative terms for the energy equation.
pu_x = p_x * u + p * u_x
pv_y = p_y * v + p * v_y
pw_z = p_z * w + p * w_z
utauxx_x = u_x * tauxx + u * tauxx_x
vtauxy_x = v_x * tauxy + v * tauxy_x
wtauxz_x = w_x * tauxz + w * tauxz_x
utauxy_y = u_y * tauxy + u * tauxy_y
vtauyy_y = v_y * tauyy + v * tauyy_y
wtauyz_y = w_y * tauyz + w * tauyz_y
utauxz_z = u_z * tauxz + u * tauxz_z
vtauyz_z = v_z * tauyz + v * tauyz_z
wtauzz_z = w_z * tauzz + w * tauzz_z
# Source terms: residuals of the nondimensional Navier-Stokes equations
# evaluated at the manufactured solution.
Q_rho = rho_t + rhou_x + rhov_y + rhow_z
Q_rhou = ( rhou_t + rhouu_x + rhouv_y + rhouw_z
         + p_x / (Ma * Ma)
         - (1 / Re) * (tauxx_x + tauxy_y + tauxz_z) )
Q_rhov = ( rhov_t + rhouv_x + rhovv_y + rhovw_z
         + p_y / (Ma * Ma)
         - (1 / Re) * (tauxy_x + tauyy_y + tauyz_z) )
Q_rhow = ( rhow_t + rhouw_x + rhovw_y + rhoww_z
         + p_z / (Ma * Ma)
         - (1 / Re) * (tauxz_x + tauyz_y + tauzz_z) )
Q_rhoe = ( rhoe_t + rhoue_x + rhove_y + rhowe_z
         + pu_x + pv_y + pw_z + qx_x + qy_y + qz_z
         - (Ma * Ma / Re) * ( utauxx_x + vtauxy_x + wtauxz_x
                            + utauxy_y + vtauyy_y + wtauyz_y
                            + utauxz_z + vtauyz_z + wtauzz_z) )
| {
"repo_name": "nicholasmalaya/arcanus",
"path": "disputatio/dissertation/mms/forcing.py",
"copies": "2",
"size": "5268",
"license": "mit",
"hash": -912041578386577200,
"line_mean": 46.0357142857,
"line_max": 79,
"alpha_frac": 0.4345102506,
"autogenerated": false,
"ratio": 1.9826872412495296,
"config_test": false,
"has_no_keywords": true,
"few_assignments": false,
"quality_score": 0.8334576968168148,
"avg_score": 0.016524104736276368,
"num_lines": 112
} |
# assuming this is run by pytest: http://doc.pytest.org/en/latest/
# e.g.
# py.test tests/unittests.py
# or
# py.test -s tests/unittests.py
from __future__ import print_function
import sys,os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), ".."))
import phrasemachine as pm
import pytest
def test_custom_regex():
    # A user-supplied tag regex should drive phrase extraction.
    result = pm.get_phrases(tokens=["the", "red", "car"], postags=["D", "A", "N"], regex='DA')
    found = set(result["counts"].keys())
    assert "the red" in found, "custom regex should work"
def test_ark_tags():
    '''
    Phrasemachine should still work when the user provides coarsened tags
    in the 5-tag system (e.g. they ran the ark tagger).
    '''
    result = pm.get_phrases(tokens=["red", "car"], postags=["A", "N"])
    found = set(result["counts"].keys())
    assert "red car" in found, "used to break before coarsened tags added to coarsemap in phrasemachine.py"
def test_pos():
    # Extract the set of (start, end) token spans for a POS tag sequence.
    def spans_for(tags, **kwargs):
        result = pm.get_phrases(postags=tags, output='token_spans', **kwargs)
        return set(result['token_spans'])

    assert spans_for("JJ NN NN".split()) == {(0, 2), (0, 3), (1, 3)}
    assert spans_for("VB JJ NN NN".split()) == {(1, 3), (1, 4), (2, 4)}
    assert spans_for("VB JJ NN".split()) == {(1, 3)}
    assert spans_for("NN".split()) == set()
def test_minmaxlen():
    # minlen/maxlen bound the length of candidate spans; unigrams are
    # excluded by default.
    tags = "NN NN NN".split()
    extract = pm.extract_ngram_filter
    assert (0, 3) in extract(tags)
    assert (0, 3) in extract(tags, maxlen=3)
    assert (0, 3) not in extract(tags, maxlen=2)
    assert len(extract(tags, maxlen=0)) == 0
    assert (0,) not in extract(tags), "default should exclude unigrams"
    assert (0,) not in extract(tags, minlen=2)
    assert (0, 1) not in extract(tags, minlen=3)
    assert (0, 3) in extract(tags, minlen=3)
def test_basic_tagging():
    # Have to pick an example easy for the tagger
    result = pm.get_phrases("Red stock market", output=['pos', 'tokens', 'token_spans', 'counts'])
    assert result['pos'] == "JJ NN NN".split(), "this test failure may be due to tagger uncertainty... though unlikely..."
    assert set(result['token_spans']) == {(0, 2), (0, 3), (1, 3)}
    counts = result['counts']
    assert len(counts) == 3
    assert counts['red stock'] == 1
    assert counts['red stock market'] == 1
    assert counts['stock market'] == 1
def test_len_toks_equals_len_tags_nltk():
    """The nltk tagger must emit exactly one POS tag per token."""
    text = "Hi I like this. OK another sentence."
    # Skip when nltk is missing; see
    # http://doc.pytest.org/en/latest/skipping.html#skipping-on-a-missing-import-dependency
    # Fix: the returned module was previously bound to a variable
    # misleadingly named 'docutils' (copied from the pytest docs example);
    # it was never used, so drop the binding.
    pytest.importorskip("nltk")
    x = pm.get_stdeng_nltk_tagger().tag_text(text)
    assert len(x['tokens']) == len(x['pos'])
def test_bad_counts_example():
    # Each sentence mentions 'social security' at most once.
    result = pm.get_phrases("Social security is a law. Gravity is one too. Cheeseburgers are tasty.")
    assert result['counts']['social security'] == 1
def test_bad_counts_example_2():
    # Two separate mentions across sentences must count as 2.
    result = pm.get_phrases("Social security is a law. Gravity is one too. Cheeseburgers are tasty. Social security is in a lockbox.")
    assert result['counts']['social security'] == 2
def test_multisentence():
    result = pm.get_phrases("blue table. blue table. blue table.")
    print(result)
    # should be just 'blue table'. if buggy, it can pick up spans across sentences
    assert len(result['counts']) == 1
| {
"repo_name": "slanglab/phrasemachine",
"path": "py/tests/test_all.py",
"copies": "1",
"size": "3370",
"license": "mit",
"hash": -8456857334137674000,
"line_mean": 39.119047619,
"line_max": 135,
"alpha_frac": 0.6430267062,
"autogenerated": false,
"ratio": 3.052536231884058,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4195562938084058,
"avg_score": null,
"num_lines": null
} |
# assuming words contain alphabetical only
from collections import Counter
ORD_A = ord('a')  # code point of lowercase 'a'
ORD_Z = ord('z')  # code point of lowercase 'z'
ALPHABET_LEN = ORD_Z - ORD_A + 1  # 26 lowercase letters
# alphabetic, lowercase
def letter_counter(word):
    """Return a 26-slot count of the lowercase letters a-z in ``word``.

    Non-alphabetic characters are ignored; input is lowercased first.
    """
    counts = [0] * ALPHABET_LEN
    for ch in word.lower():
        code = ord(ch)
        if ORD_A <= code <= ORD_Z:
            counts[code - ORD_A] += 1
    return counts
class Solution:
    def shortestCompletingWord(self, licensePlate, words):
        """Return the shortest word containing every letter of the plate.

        Letter comparison is case-insensitive and ignores digits/spaces;
        repeated plate letters must appear at least as many times in the
        word.  Among equally short completing words, the first one in
        ``words`` wins.  Returns "" when no word completes the plate.

        Improvement: uses collections.Counter (imported at the top of this
        file but previously unused) instead of the manual 26-slot array,
        with identical behavior.

        :type licensePlate: str
        :type words: List[str]
        :rtype: str
        """
        def letters(text):
            # Count only lowercase alphabetic characters.
            return Counter(ch for ch in text.lower() if 'a' <= ch <= 'z')

        required = letters(licensePlate)
        best = None
        for word in words:
            # Skip words that cannot beat the current best length.
            if best is not None and len(word) >= len(best):
                continue
            have = letters(word)
            if all(have[ch] >= cnt for ch, cnt in required.items()):
                best = word
        return best if best is not None else ""
| {
"repo_name": "SelvorWhim/competitive",
"path": "LeetCode/ShortestCompletingWord.py",
"copies": "1",
"size": "1087",
"license": "unlicense",
"hash": 884044485348578800,
"line_mean": 26.8717948718,
"line_max": 67,
"alpha_frac": 0.5124195032,
"autogenerated": false,
"ratio": 3.7874564459930316,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9780298454596683,
"avg_score": 0.0039154989192696586,
"num_lines": 39
} |
### Assumption: All kids should be prepared to work together and have something to learn from each other
# Assign students partners
# -each kid
# *skill: math, reading, science, social studies, other
# *skill level: high, medium, low
# *social: leader
# -support: if paired with {student name}
# -evaluation
# -kid of partners
# -teacher of groups
# import random.choice
import random
import math
from sys import exit
import csv
class Teacher():
    """A teacher identified by name, owning zero or more classrooms."""

    def __init__(self, first_name, last_name):  # pass teacher's first and last name
        self.first_name = first_name
        self.last_name = last_name
        self.classrooms = dict()  # "ClassroomID_<id>" -> Classroom

    def add_classroom(self, classroom_data):
        # classroom_data row: [classroom_id, class_name, ...] from classrooms.csv
        key = "ClassroomID_" + classroom_data[0]
        self.classrooms[key] = Classroom(classroom_data[1])
class Classroom():
    """A named class roster that can randomly partition its participating
    students into groups (group_students prompts on stdin and prints)."""

    def __init__(self, classname): #need a name for the class e.g., homeroom
        self.class_name = classname
        self.students = dict()  # "StudentID_<id>" -> Student

    def add_student(self,student_data):
        # student_data row: [student_id, first_name, last_name, ...] from students.csv
        self.students["StudentID_" + student_data[0]] = Student(student_data[1], student_data[2]) #Pass student_firstname, student_lastname

    def list_participants(self): #returns the names of students who are present and participating
        participant_list = []
        for k,v in self.students.items():
            if v.attendance == True and v.participation == True:
                participant_list.append(v.studentname)
        return participant_list

    def group_students(self):
        """Prompt for a group count and print random groups of participants.

        Calls exit() when grouping is impossible (fewer than 2 participants,
        or fewer than 2 students per requested group).
        """
        groups_wanted = input("How many groups do you want?")
        groups_wanted = int(groups_wanted)
        ### Quick checks to see if grouping makes sense
        num_of_kids = len(self.list_participants())
        if num_of_kids <= 1:
            print("There aren't at least 2 students in your class")
            exit()
        if groups_wanted * 2 > num_of_kids:
            print("There aren't enough students. Try having fewer groups.")
            exit()
        student_groups = []
        available = self.list_participants()
        ### When students can be evenly divided into groups
        if num_of_kids % groups_wanted == 0:
            num_in_group = int((num_of_kids) / (groups_wanted))
            for i in range(groups_wanted):
                picked_kids = random.sample(available, num_in_group)
                student_groups.append(picked_kids)
                for kid in picked_kids:
                    available.remove(kid)
        ### When uneven num of students
        else:
            approx_groups = ((num_of_kids) / (groups_wanted))
            decision_point = round(approx_groups - int(approx_groups), 1) #decision_point to round groups up/down
            # Round the group size down when the fractional part is <= .5,
            # up otherwise; the final group absorbs the remainder either way.
            if decision_point <= .5:
                num_in_group = math.floor(approx_groups)
            else:
                num_in_group = math.ceil(approx_groups)
            for i in range(groups_wanted-1): # groups_wanted -1, leaves the last group with more/less students
                picked_kids = random.sample(available, num_in_group)
                student_groups.append(picked_kids)
                for kid in picked_kids:
                    available.remove(kid)
            student_groups.append(available)
        for group in student_groups:
            print(group)
class Student():
    """A student with attendance/participation flags used for grouping."""

    ### Student Characteristics
    def __init__(self, first, last):  # student's first and last name needed
        self.first = first
        self.last = last
        self.studentname = self.first + "_" + self.last
        # Both flags default to the common case: present and participating.
        self.attendance = True
        self.participation = True

    def change_attendance(self):
        """Flip the attendance flag (present <-> absent)."""
        self.attendance = not self.attendance

    def change_participation(self):
        """Flip the participation flag (active <-> inactive)."""
        self.participation = not self.participation
# --- CSV-driven setup: build the Teacher -> Classroom -> Student graph ---
teachers = dict()  # "TeacherID_<id>" -> Teacher
classroom_teacher = dict()  # "ClassroomID_<id>" -> owning "TeacherID_<id>"
with open('teachers.csv') as teachersfile, open('classrooms.csv') as classroomsfile, open('students.csv') as studentssfile:
    teachersCSV = csv.reader(teachersfile, delimiter = ",")
    next(teachersCSV, None)  # skip header row
    classroomsCSV = csv.reader(classroomsfile, delimiter=",")
    next(classroomsCSV, None)  # skip header row
    studentsCSV = csv.reader(studentssfile, delimiter=",")
    next(studentsCSV, None)  # skip header row
    for row in teachersCSV:
        teachers["TeacherID_"+row[0]] = Teacher(row[1], row[2])
    for row in classroomsCSV:
        # row[2] references the owning teacher's id.
        teacherID = "TeacherID_" + row[2]
        if teacherID not in teachers.keys():
            print("Error - the teacher is not in the teachers.CSV", row)
            exit()
        teachers[teacherID].add_classroom(row)
        classroom_teacher["ClassroomID_"+row[0]] = teacherID
    for row in studentsCSV:
        # row[4] references the student's classroom id.
        classroomID = "ClassroomID_" + row[4]
        if classroomID not in classroom_teacher.keys():
            print("Error - the classroom is not in the classrooms.CSV", row)
            exit()
        teachers[classroom_teacher[classroomID]].classrooms[classroomID].add_student(row)
# Demo: mark one student as not participating, then group the rest.
teachers["TeacherID_1"].classrooms["ClassroomID_1"].students["StudentID_20163"].change_participation()
teachers["TeacherID_1"].classrooms["ClassroomID_1"].group_students()
"""
for k,v in teachers.items():
    print(k,v.first_name,v.last_name)
    for k,v in v.classrooms.items():
        print(k,v.class_name, v.list_participants)
        for k,v in v.students.items():
            print(k,v.first,v.last,v.attendance,v.participation)
""" | {
"repo_name": "DanaMercury/Buddies",
"path": "buddies.py",
"copies": "1",
"size": "5574",
"license": "cc0-1.0",
"hash": -6255100610587421000,
"line_mean": 37.4482758621,
"line_max": 142,
"alpha_frac": 0.6157158235,
"autogenerated": false,
"ratio": 3.7409395973154362,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4856655420815436,
"avg_score": null,
"num_lines": null
} |
# a stacked bar plot with errorbars, then publish logs as markdown to S3
import numpy as np
import matplotlib.pyplot as plt
N = 5  # number of groups on the x axis
menMeans = (20, 35, 30, 35, 27)
womenMeans = (25, 32, 34, 20, 25)
menStd = (2, 3, 4, 1, 2)
womenStd = (3, 5, 2, 3, 3)
ind = np.arange(N)    # the x locations for the groups
width = 0.35       # the width of the bars: can also be len(x) sequence
p1 = plt.bar(ind, menMeans, width, color='#d62728', yerr=menStd)
# Second series stacked on top of the first via bottom=menMeans.
p2 = plt.bar(ind, womenMeans, width,
             bottom=menMeans, yerr=womenStd)
plt.ylabel('Scores')
plt.title('Scores by group and gender')
plt.xticks(ind, ('G1', 'G2', 'G3', 'G4', 'G5'))
plt.yticks(np.arange(0, 81, 10))
plt.legend((p1[0], p2[0]), ('Men', 'Women'))
# Earlier logging approach, kept for reference:
# from LoggingController import LoggingController
# controller = LoggingController()
# controller.profile_name = 'default'
# controller.s3_bucket = 'emr-related-files'
# controller.log_string('first')
# controller.log_matplotlib_plot(plt)
# controller.log_string('third')
# Build markdown reports from local logs and the configured S3 bucket.
from MarkdownBuilder import MarkdownBuilder
builder = MarkdownBuilder()
builder.profile_name = 'default'
builder.s3_bucket = 'emr-related-files'
builder.path_to_save_logs_local = 'logs'
builder.build_markdowns()
| {
"repo_name": "kcrandall/Kaggle_Mercedes_Manufacturing",
"path": "spark/experiements/reza/logging_lib/example.py",
"copies": "6",
"size": "1160",
"license": "mit",
"hash": -3427162537374826500,
"line_mean": 32.1428571429,
"line_max": 71,
"alpha_frac": 0.6931034483,
"autogenerated": false,
"ratio": 2.8292682926829267,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.6522371740982926,
"avg_score": null,
"num_lines": null
} |
# Plot per-class label counts for three datasets as bar charts (Python 2:
# note the print statement below).
import sys
import os.path
# Make the sibling Classification package importable.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), os.path.pardir, "Classification")))
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('agg')  # headless rendering (no display required)
import util
cmb_model = util.load_model("../Data/cmb.pkl")
tamc_model = util.load_model("../Data/tamc.pkl")
cl_model = util.load_model("../Data/cl.pkl")
# NOTE(review): all three use cmb_model.class_attribute as the class column;
# confirm the tamc/cl lines were not meant to use their own class_attribute.
data_cmb = util.DatasetStatistics(cmb_model.data_frame, cmb_model.data_frame.loc[:, cmb_model.class_attribute]).classes.to_dict()
data_tamc = util.DatasetStatistics(tamc_model.data_frame, tamc_model.data_frame.loc[:, cmb_model.class_attribute]).classes.to_dict()
data_cl = util.DatasetStatistics(cl_model.data_frame, cl_model.data_frame.loc[:, cmb_model.class_attribute]).classes.to_dict()
filenames = ['SFig3/SFig3_A.png', 'SFig3/SFig3_B.png', 'SFig3/SFig3_C.png']
datasets = [data_cmb, data_tamc, data_cl]
colors = ["#1f77b4", "#ff7f0e", "#2ca02c"]
# NOTE(review): y_ranges is never used; the y limit below is hard-coded to
# 52000 for all three plots.  subplot and i are also unused.
y_ranges = [55000, 40000, 45000]
subplot = 311
borderw = 1.0
borderh = 2.0
figw_base = (30.0-borderw)
figh_base = 7.0-borderw
figh = borderw+figh_base
for filename, data, color in zip(filenames, datasets, colors):
    # Sort by count descending, breaking ties alphabetically by label.
    data_sort = sorted(sorted(data.items(), key=lambda x:x[0]), key=lambda x:x[1], reverse=True)
    names = [_[0] for _ in data_sort]
    values = [_[1] for _ in data_sort]
    print names, values
    N = len(names)
    ind = np.arange(0, N)    # the x locations for the groups
    space = 0.3
    i = 2.0
    width = (1-space)       # the width of the bars: can also be len(x) sequence
    # Figure width scales with the number of bars.
    figw = borderw+(figw_base/200.0*N)
    fig = plt.figure(figsize=(figw, figh))
    ax = fig.add_subplot(111)
    pos = ind
    p1 = ax.bar(pos, values, width=width, align='center', color=color)
    plt.ylabel('Count')
    plt.xlabel('Label')
    ax.set_xlim(-width,N-1+width)
    ax.set_xticks(ind)
    ax.set_xticklabels(names, rotation=90)
    ax.set_ylim(0, 52000)
    ax.set_yticks(np.arange(0, 52000, 5000))
    plt.subplots_adjust(left=(borderw-0.1)/figw, bottom=(borderh-0.1)/figh, right=1-0.1/figw, top=1-0.1/figh)
    # or
    #plt.tight_layout()
    fig.savefig(filename, dpi=300)
    # NOTE(review): the next line's .svg output is immediately overwritten by
    # the line after it (only the rasterize flag differs).
    fig.savefig(filename.replace('png', 'svg'), dpi=300)
    fig.savefig(filename.replace('png', 'svg'), rasterize=False, dpi=300)
| {
"repo_name": "dabrze/CheckMyBlob",
"path": "Figures/SFig3_labels.py",
"copies": "1",
"size": "2364",
"license": "mit",
"hash": 3654946939095758300,
"line_mean": 32.7647058824,
"line_max": 132,
"alpha_frac": 0.6497461929,
"autogenerated": false,
"ratio": 2.7203682393555813,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.38701144322555814,
"avg_score": null,
"num_lines": null
} |
""" A Stack using a linked list like structure """
from typing import Any, Optional
class Node:
    """One link of a singly linked chain: a payload plus a next pointer."""

    def __init__(self, data):
        self.data = data
        # None marks the end of the chain until a successor is attached.
        self.next = None

    def __str__(self):
        return f"{self.data}"
class LinkedStack:
    """
    Linked List Stack implementing push (to top),
    pop (from top) and is_empty
    >>> stack = LinkedStack()
    >>> stack.is_empty()
    True
    >>> stack.push(5)
    >>> stack.push(9)
    >>> stack.push('python')
    >>> stack.is_empty()
    False
    >>> stack.pop()
    'python'
    >>> stack.push('algorithms')
    >>> stack.pop()
    'algorithms'
    >>> stack.pop()
    9
    >>> stack.pop()
    5
    >>> stack.is_empty()
    True
    >>> stack.pop()
    Traceback (most recent call last):
        ...
    IndexError: pop from empty stack
    """

    def __init__(self) -> None:
        # Top of the stack; None when the stack is empty.
        self.top: Optional[Node] = None

    def __iter__(self):
        # Yields items from top to bottom.
        node = self.top
        while node:
            yield node.data
            node = node.next

    def __str__(self):
        """
        >>> stack = LinkedStack()
        >>> stack.push("c")
        >>> stack.push("b")
        >>> stack.push("a")
        >>> str(stack)
        'a->b->c'
        """
        return "->".join([str(item) for item in self])

    def __len__(self):
        """
        >>> stack = LinkedStack()
        >>> len(stack) == 0
        True
        >>> stack.push("c")
        >>> stack.push("b")
        >>> stack.push("a")
        >>> len(stack) == 3
        True
        """
        # O(n): walks the whole chain to count nodes.
        return len(tuple(iter(self)))

    def is_empty(self) -> bool:
        """
        >>> stack = LinkedStack()
        >>> stack.is_empty()
        True
        >>> stack.push(1)
        >>> stack.is_empty()
        False
        """
        return self.top is None

    def push(self, item: Any) -> None:
        """
        >>> stack = LinkedStack()
        >>> stack.push("Python")
        >>> stack.push("Java")
        >>> stack.push("C")
        >>> str(stack)
        'C->Java->Python'
        """
        # O(1): new node becomes the top, pointing at the old top.
        node = Node(item)
        if not self.is_empty():
            node.next = self.top
        self.top = node

    def pop(self) -> Any:
        """
        >>> stack = LinkedStack()
        >>> stack.pop()
        Traceback (most recent call last):
            ...
        IndexError: pop from empty stack
        >>> stack.push("c")
        >>> stack.push("b")
        >>> stack.push("a")
        >>> stack.pop() == 'a'
        True
        >>> stack.pop() == 'b'
        True
        >>> stack.pop() == 'c'
        True
        """
        if self.is_empty():
            raise IndexError("pop from empty stack")
        assert isinstance(self.top, Node)
        # O(1): detach the top node and return its payload.
        pop_node = self.top
        self.top = self.top.next
        return pop_node.data

    def peek(self) -> Any:
        """
        >>> stack = LinkedStack()
        >>> stack.push("Java")
        >>> stack.push("C")
        >>> stack.push("Python")
        >>> stack.peek()
        'Python'
        """
        if self.is_empty():
            raise IndexError("peek from empty stack")
        assert self.top is not None
        return self.top.data

    def clear(self) -> None:
        """
        >>> stack = LinkedStack()
        >>> stack.push("Java")
        >>> stack.push("C")
        >>> stack.push("Python")
        >>> str(stack)
        'Python->C->Java'
        >>> stack.clear()
        >>> len(stack) == 0
        True
        """
        # Dropping the top reference releases the whole chain.
        self.top = None
if __name__ == "__main__":
    # Run the doctests embedded in the docstrings above.
    from doctest import testmod

    testmod()
| {
"repo_name": "TheAlgorithms/Python",
"path": "data_structures/stacks/linked_stack.py",
"copies": "1",
"size": "3713",
"license": "mit",
"hash": -3849588068950014500,
"line_mean": 21.3522012579,
"line_max": 54,
"alpha_frac": 0.4284944789,
"autogenerated": false,
"ratio": 4.08021978021978,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.500871425911978,
"avg_score": null,
"num_lines": null
} |
# AST analysis
import os
import coda.compiler
from coda import descriptors
from coda import types
from coda.runtime.object import Object
from coda.runtime.typeregistry import TypeRegistry
from collections import defaultdict
from coda.runtime import typemixins
class GenericType(coda.types.Type):
    """Factory for parameterized types (list/set/map).

    Wraps a descriptor-backed type constructor; calling the factory with a
    sequence of type arguments instantiates the constructor and assigns each
    argument to the corresponding declared field (skipping ERROR markers).
    """

    def __init__(self, typeCtor):
        self.__typeCtor = typeCtor
        # Arity = number of fields the constructor declares (1 for list/set,
        # 2 for map key/value).
        self.__numArgs = len(typeCtor.DESCRIPTOR.getFields())

    def getNumArgs(self):
        return self.__numArgs

    def __call__(self, args):
        instance = self.__typeCtor()
        for field, arg in zip(self.__typeCtor.DESCRIPTOR.getFields(), args):
            if arg is not coda.types.ERROR:
                assert type(arg) is not coda.types.Type
                field.setValue(instance, arg)
        return instance

    def __str__(self):
        return str(self.__typeCtor)
# Built-in type names: scalars map to singleton type objects; list/set/map
# map to GenericType factories that take element/key/value type arguments.
TYPENAMES = {
    'bool': coda.types.BOOL,
    'i16': coda.types.I16,
    'i32': coda.types.I32,
    'i64': coda.types.I64,
    'float': coda.types.FLOAT,
    'double': coda.types.DOUBLE,
    'string': coda.types.STRING,
    'bytes': coda.types.BYTES,
    'list': GenericType(coda.types.ListType),
    'set': GenericType(coda.types.SetType),
    'map': GenericType(coda.types.MapType),
}
class FileScope(typemixins.DeclTypeMixin):
    '''Helper class used to represent the file-level lookup scope.'''

    def __init__(self, file):
        self._file = file

    def getFullName(self):
        # A file scope's full name is just its package.
        return self._file.getPackage()

    def getFile(self):
        return self._file
class ALLOWED_TYPES:
    '''Namespace of TypeKind sets permitted in each collection position.'''

    # Kinds allowed as list elements.
    LIST_ELEMENT = frozenset([
        types.TypeKind.BOOL,
        types.TypeKind.INTEGER,
        types.TypeKind.FLOAT,
        types.TypeKind.DOUBLE,
        types.TypeKind.STRING,
        types.TypeKind.BYTES,
        types.TypeKind.LIST,
        types.TypeKind.SET,
        types.TypeKind.MAP,
        types.TypeKind.STRUCT,
        types.TypeKind.ENUM,
    ])
    # Kinds allowed as set elements (note: narrower than LIST_ELEMENT;
    # bool, float/double and nested collections are excluded).
    SET_ELEMENT = frozenset([
        types.TypeKind.INTEGER,
        types.TypeKind.STRING,
        types.TypeKind.BYTES,
        types.TypeKind.STRUCT,
        types.TypeKind.ENUM,
    ])
    # Kinds allowed as map keys (same restrictions as set elements).
    MAP_KEY = frozenset([
        types.TypeKind.INTEGER,
        types.TypeKind.STRING,
        types.TypeKind.BYTES,
        types.TypeKind.STRUCT,
        types.TypeKind.ENUM,
    ])
    # Kinds allowed as map values (mirrors LIST_ELEMENT).
    MAP_VALUE = frozenset([
        types.TypeKind.BOOL,
        types.TypeKind.INTEGER,
        types.TypeKind.FLOAT,
        types.TypeKind.DOUBLE,
        types.TypeKind.STRING,
        types.TypeKind.BYTES,
        types.TypeKind.LIST,
        types.TypeKind.SET,
        types.TypeKind.MAP,
        types.TypeKind.STRUCT,
        types.TypeKind.ENUM,
    ])
class Analyzer:
    def __init__(self, errorReporter):
        self.errorReporter = errorReporter
        # Name -> type lookup, seeded with the built-in type names.
        self.typenames = dict(TYPENAMES)
        # file path -> set of import paths transitively visible to that file.
        self.visibleFiles = defaultdict(set)
        # import path -> FileDescriptor of every file processed so far.
        self.imports = {}
        self.types = {} # Dictionary of unique types
        # Work queues filled during the registration pass and drained by the
        # later analysis passes in run().
        self.filesToAnalyze = []
        self.structsToAnalyze = []
        self.enumsToAnalyze = []
        self.fieldsToAnalyze = []
        self.methodsToAnalyze = []
        self.extensionToAnalyze = []
        self.typeRegistry = TypeRegistry()
    def run(self, asts, importAsts):
        '''Analyze all parsed files and return descriptors for 'asts'.

        'importAsts' are files pulled in via imports: they are analyzed so
        their types resolve, but only descriptors for 'asts' are returned.
        Analysis is multi-pass: names are registered first, then structs,
        enums, extensions and options are resolved once all names are known.'''
        for fileAst in importAsts:
            fd = self.processFile(fileAst)
            self.filesToAnalyze.append((fileAst, fd))
        fdList = []
        for fileAst in asts:
            fd = self.processFile(fileAst)
            fdList.append(fd)
            self.filesToAnalyze.append((fileAst, fd))
        for fileAst, fd in self.filesToAnalyze:
            self.analyzeFile(fileAst, fd)
        for fileAst, fd in self.filesToAnalyze:
            self.computeVisibleFiles(fd)
        # Process the list again now that we've registered all types and extensions
        for structAst, struct in self.structsToAnalyze:
            self.analyzeStruct(structAst, struct)
        for enumAst, enum in self.enumsToAnalyze:
            self.analyzeEnum(enumAst, enum)
        for fieldAst, field, extends, scope in self.extensionToAnalyze:
            self.analyzeExtensionField(fieldAst, field, extends, scope)
        for fileAst, fd in self.filesToAnalyze:
            self.setFileOptions(fileAst, fd)
        for fileAst, fd in self.filesToAnalyze:
            self.setImportPackages(fd)
        for structAst, struct in self.structsToAnalyze:
            self.setStructOptions(structAst, struct)
        for fieldAst, field in self.fieldsToAnalyze:
            self.setFieldOptions(fieldAst, field)
        for methodAst, method in self.methodsToAnalyze:
            self.setMethodOptions(methodAst, method)
        return fdList
    def processFile(self, fileAst):
        '''First pass over a file: create its FileDescriptor and register
        every top-level struct, enum and extension by name.'''
        assert isinstance(fileAst, coda.compiler.ast.File)
        dirname, filename = os.path.split(fileAst.path)
        fd = descriptors.FileDescriptor()
        fd.setName(filename).setDirectory(dirname).setPackage(fileAst.package)
        self.imports[fileAst.path] = fd
        # Check for invalid package name?
        # Top-level declarations are qualified with the package prefix.
        nameprefix = '' if fd.getPackage() is None else fd.getPackage() + '.'
        for extensionAst in fileAst.extensions:
            extensions = self.defineExtension(extensionAst, fd, None)
            fd.getMutableExtensions().extend(extensions)
        for structAst in fileAst.structs:
            struct = self.defineStruct(nameprefix, structAst, fd, None)
            fd.getMutableStructs().append(struct)
        for enumAst in fileAst.enums:
            enum = self.defineEnum(nameprefix, enumAst, fd, None)
            fd.getMutableEnums().append(enum)
        return fd
    def defineStruct(self, prefix, ast, file, enclosingType):
        '''Register a struct (and its nested declarations) by name.

        'prefix' is the enclosing package/type qualifier including the
        trailing dot.  Deeper analysis (base type, fields, methods) is
        deferred to analyzeStruct via the structsToAnalyze queue.'''
        fullname = prefix + ast.name
        assert isinstance(ast, coda.compiler.ast.StructDef)
        struct = descriptors.StructDescriptor(None)
        struct.setName(ast.name).setFile(file)
        if enclosingType:
            struct.setEnclosingType(enclosingType)
        struct.setSourceLine(ast.location.lineno)
        # Record the declared extension id range, if any ((0, 0) = none).
        if ast.extensionRange != (0, 0):
            minExt, maxExt = ast.extensionRange
            assert isinstance(minExt, int)
            assert isinstance(maxExt, int)
            if minExt > maxExt or minExt < 0 or maxExt > 2**32-1:
                # NOTE(review): the error is reported at ast.typeId.location
                # even though the range is unrelated to the type id; confirm
                # this is intended (typeId may also be absent here).
                self.errorReporter.errorAt(
                    ast.typeId.location,
                    'Invalid extension range: {0} .. {1}'.format(minExt, maxExt))
            else:
                struct.setMinExtension(minExt)
                struct.setMaxExtension(maxExt)
        self.defineTypeName(fullname, struct, ast.location)
        # Register nested declarations under this struct's qualified name.
        prefix = fullname + '.'
        for extensionAst in ast.extensions:
            extensions = self.defineExtension(extensionAst, file, struct)
            struct.getMutableExtensions().extend(extensions)
        for structAst in ast.structs:
            st = self.defineStruct(prefix, structAst, file, struct)
            struct.getMutableStructs().append(st)
        for enumAst in ast.enums:
            enum = self.defineEnum(prefix, enumAst, file, struct)
            struct.getMutableEnums().append(enum)
        self.structsToAnalyze.append((ast, struct))
        return struct
    def defineEnum(self, prefix, ast, file, enclosingType):
        '''Register an enum by name and define its values.

        Values auto-increment from 0; an explicit value resets the counter
        and subsequent values continue from there.'''
        fullname = prefix + ast.name
        assert isinstance(ast, coda.compiler.ast.EnumDef)
        desc = descriptors.EnumDescriptor()
        desc.setName(ast.name).setFile(file)
        if enclosingType:
            desc.setEnclosingType(enclosingType)
        desc.setSourceLine(ast.location.lineno)
        self.defineTypeName(fullname, desc, ast.location)
        self.enumsToAnalyze.append((ast, desc))
        nextIndex = 0
        for valueAst in ast.values:
            if valueAst.value is not None:
                nextIndex = valueAst.value.value
            self.defineEnumValue(valueAst, desc, nextIndex)
            nextIndex += 1
        return desc
    def defineExtension(self, ast, file, enclosingType):
        '''Create ExtensionField descriptors for an 'extend' block.

        Resolution of the extended type and range validation are deferred to
        analyzeExtensionField via the extensionToAnalyze queue.'''
        assert isinstance(ast, coda.compiler.ast.StructDef)
        # File-level extend blocks resolve names in file scope.
        scope = enclosingType
        if not scope:
            scope = FileScope(file)
        result = []
        for fieldAst in ast.fields:
            extField = descriptors.ExtensionField()
            extField.setFile(file)
            extField.setEnclosingType(enclosingType)
            extField.setSourceLine(ast.location.lineno)
            extField.setName(fieldAst.name)
            extField.setId(fieldAst.index)
            self.extensionToAnalyze.append((fieldAst, extField, ast.extends, scope))
            result.append(extField)
        return result
    def analyzeFile(self, fileAst, fd):
        '''Record import paths on the file descriptor.

        Import packages cannot be filled in yet (the imported file's options
        may not have been processed); see setImportPackages.'''
        for _, relPath in fileAst.imports:
            # Import paths are relative to the importing file's directory.
            importPath = os.path.join(os.path.dirname(fileAst.path), relPath)
            # importFile = self.imports[importPath]
            imp = coda.descriptors.FileDescriptor.Import()
            imp.setPath(importPath)
            # imp.setPackage(importFile.getOptions().getPackage())
            fd.getMutableImports().append(imp)
def setImportPackages(self, fd):
for imp in fd.getImports():
importPath = imp.getPath()
importFile = self.imports[importPath]
imp.setPackage(importFile.getOptions().getPackage())
def computeVisibleFiles(self, fd):
path = os.path.join(fd.getDirectory(), fd.getName())
for imp in fd.getImports():
importPath = imp.getPath()
importFile = self.imports[importPath]
self.visibleFiles[path].add(importPath)
if importPath not in self.visibleFiles:
visibleSet = self.visibleFiles[importPath]
self.computeVisibleFiles(importFile)
self.visibleFiles[path].update(visibleSet)
    def analyzeStruct(self, ast, struct):
        '''Second-pass analysis of a struct: resolve its type id (numeric or
        a dotted enum-value reference), validate and link its base type,
        register it in the subtype registry, then define fields and methods.

        @type ast: coda.compiler.ast.StructDef
        @type struct: descriptors.StructType'''
        if ast.typeId is not None:
            if isinstance(ast.typeId, coda.compiler.ast.Ident):
                # Dotted reference 'EnumName.VALUE' used as the type id.
                enum, _, value = ast.typeId.value.rpartition('.')
                if not enum:
                    # NOTE(review): message likely means "Incorrect format".
                    self.errorReporter.errorAt(
                        ast.typeId.location,
                        'Incorrect formatter for type ID: {0}'.format(ast.typeId))
                    return
                enumTy = self.lookupTypeName(enum, struct, ast.typeId.location)
                if enumTy:
                    if enumTy.typeId() != types.TypeKind.ENUM:
                        # NOTE(review): misleading message -- the condition
                        # actually checks that the named type is an enum.
                        self.errorReporter.errorAt(
                            ast.typeId.location,
                            'Type id is not a number \'{0}\''.format(ast.typeId))
                        return
                    enumVal = enumTy.getValue(value)
                    if enumVal is None:
                        self.errorReporter.errorAt(
                            ast.typeId.location,
                            'Unknown enumeration value \'{0}\''.format(ast.typeId))
                        return
                    assert enumVal.getValue() is not None
                    struct.setTypeId(enumVal.getValue())
            else:
                struct.setTypeId(ast.typeId)
            # NOTE(review): typeId() here differs from getTypeId() used
            # below -- confirm this zero-check tests the intended value.
            if struct.typeId() == 0:
                self.errorReporter.errorAt(ast.typeId.location, 'Type id of 0 is not valid')
                return
        if ast.baseType:
            assert struct.hasTypeId()
            assert struct.getTypeId() is not None
            baseType = self.getType(ast.baseType, struct)
            if baseType is types.ERROR:
                return baseType
            if baseType is not None and not isinstance(
                baseType, descriptors.StructType):
                self.errorReporter.errorAt(
                    ast.baseType.location,
                    'Base type \'{0}\' is not a struct'.format(ast.baseType.name))
                return baseType
#       if not baseType.hasTypeId():
#         self.errorReporter.errorAt(
#             ast.baseType.location,
#             'Base type \'{0}\' must declare a type id to be inheritable'.format(ast.baseType.name))
#         return types.ERROR
            struct.setBaseType(baseType)
            extensibleBase = self.getExtensibleBase(baseType)
            # Walk to the root of the inheritance chain; the root must not
            # itself carry a type id.
            rootType = baseType
            while rootType.hasBaseType():
                rootType = rootType.getBaseType()
            if rootType.hasTypeId():
                self.errorReporter.errorAt(
                    ast.baseType.location,
                    'Root type \'{0}\' must not have a type id.'.format(rootType.getName()))
                return types.ERROR
            # Type ids must be unique per root type.
            typeForId = self.typeRegistry.getSubtype(rootType, struct.getTypeId())
            if typeForId:
                self.errorReporter.errorAt(
                    ast.location,
                    'Attempt to register type {0} with ID {1} but it is already used by {2}'.format(
                        ast.name, struct.getTypeId(), typeForId.getName()))
                return types.ERROR
            else:
                self.typeRegistry.addSubtype(struct)
            # A subtype may not re-declare an extension range its ancestors own.
            if struct.hasMinExtension() and extensibleBase:
                self.errorReporter.errorAt(
                    ast.location,
                    'Struct {0} cannot override the extension range of base class {1}'.format(
                        ast.name, extensibleBase.getName()))
                return types.ERROR
        # Fields are processed in ascending index order.
        for fieldAst in sorted(ast.fields, key = lambda fld: fld.index):
            self.defineField(fieldAst, struct)
        for methodAst in ast.methods:
            self.defineMethod(methodAst, struct)
    def analyzeExtensionField(self, ast, field, extends, scope):
        '''Resolve an extension field: find the extensible ancestor of the
        extended type, resolve the field type, validate the id against the
        declared extension range, and register the extension.'''
        struct = self.getType(extends, scope)
        if not struct or struct is types.ERROR:
            return types.ERROR
        # The extension attaches to the nearest ancestor declaring a range.
        extensibleStruct = self.getExtensibleBase(struct)
        if not extensibleStruct:
            self.errorReporter.errorAt(
                ast.location,
                'No extension range defined for struct {0}.'.format(struct.getName()))
            return
        struct = extensibleStruct
        field.setExtends(struct)
        field.setType(self.getType(ast.fieldType, scope))
        if not struct.hasMinExtension():
            self.errorReporter.errorAt(
                ast.location,
                'No extension range defined for struct {0}.'.format(struct.getName()))
        elif ast.index < struct.getMinExtension() or ast.index > struct.getMaxExtension():
            self.errorReporter.errorAt(
                ast.location,
                'Extension ID {0} does not fall within the allowed extension range '
                'for struct {1}: {2}..{3}.'.format(
                    ast.index, struct.getName(), struct.getMinExtension(), struct.getMaxExtension()))
        elif self.typeRegistry.getExtension(struct, ast.index):
            self.errorReporter.errorAt(
                ast.location,
                'Extension ID {0} for type {1} is already in use.'.format(
                    ast.index, struct.getName()))
        else:
            self.typeRegistry.addExtension(field)
def getExtensibleBase(self, struct):
while True:
if struct.hasMinExtension():
return struct
elif struct.hasBaseType():
struct = struct.getBaseType()
else:
return None
def defineField(self, ast, struct):
'''@type ast: ast.StructDef.Field
@type struct: descriptors.StructType'''
struct.checkMutable()
assert isinstance(ast, coda.compiler.ast.StructDef.Field)
if struct.getFieldById(ast.index) is not None:
assert ast.location
self.errorReporter.errorAt(
ast.location,
'Field with index {0} already defined.'.format(ast.index))
return
if struct.getField(ast.name) is not None:
self.errorReporter.errorAt(
ast.location,
'Field with name {0} already defined.'.format(ast.name))
return
if struct.hasMaxExtension():
assert struct.hasMaxExtension()
if ast.index >= struct.getMinExtension() and ast.index <= struct.getMaxExtension():
self.errorReporter.errorAt(
ast.location,
'Field index {0} falls within the extension range.'.format(ast.index))
struct.checkMutable()
field = struct.defineField(
ast.name,
ast.index,
self.getType(ast.fieldType, struct))
self.fieldsToAnalyze.append((ast, field))
def defineMethod(self, ast, struct):
'''@type ast: ast.StructDef.Method
@type struct: descriptors.StructType'''
struct.checkMutable()
assert isinstance(ast, coda.compiler.ast.StructDef.Method)
if ast.index is not None and struct.getFieldById(ast.index) is not None:
assert ast.location
self.errorReporter.errorAt(
ast.location,
'Field with index {0} already defined'.format(ast.index))
return
if struct.getField(ast.name) is not None:
self.errorReporter.errorAt(
ast.location,
'Field with name {0} already defined'.format(ast.name))
return
paramList = []
for p in ast.params:
paramType = self.getType(p.type, struct)
param = descriptors.StructDescriptor.Param().setName(ast.name).setType(paramType)
paramList.append(param)
struct.checkMutable()
method = struct.defineMethod(
ast.name,
ast.index,
paramList,
self.getType(ast.returnType, struct))
self.methodsToAnalyze.append((ast, method))
    def analyzeEnum(self, ast, enum):
        '''Second-pass analysis of an enum.  Intentionally a no-op: values
        were already created by defineEnum/defineEnumValue.

        @type ast: coda.compiler.ast.EnumDef
        @type enum: descriptors.EnumType'''
def defineEnumValue(self, ast, enum, index):
'''@type ast: ast.StructDef.Field
@type struct: descriptors.StructType'''
assert isinstance(ast, coda.compiler.ast.EnumDef.Value)
if enum.getValue(ast.name) is not None:
self.errorReporter.errorAt(
ast.location,
'Value with name {0} already defined'.format(ast.name))
return
value = descriptors.EnumType.Value()
value.setName(ast.name)
value.setValue(index)
# field.setSourceLine(ast.location.lineno)
enum.addValue(value)
def setFileOptions(self, ast, fd):
'''@type ast: coda.compiler.ast.File
@type struct: descriptors.FileDescriptor'''
if ast.options:
optionHolder = fd.getMutableOptions()
for option in ast.options:
self.setOption(
optionHolder, option, descriptors.FileOptions)
optionHolder.freeze()
def setStructOptions(self, ast, struct):
'''@type ast: coda.compiler.ast.StructDef
@type struct: descriptors.StructType'''
if ast.options:
optionHolder = struct.getMutableOptions()
for option in ast.options:
self.setOption(optionHolder, option, descriptors.StructOptions)
optionHolder.freeze()
if (struct.getOptions().isShared()
and struct.hasBaseType()
and not struct.getBaseType().getOptions().isShared()):
self.errorReporter.errorAt(
ast.location,
'Type {0} cannot be a shared type, because it is a subtype of a non-shared type.'
.format(struct.getName()))
if (struct.getOptions().isReference()
and struct.hasBaseType()
and not struct.getBaseType().getOptions().isReference()):
self.errorReporter.errorAt(
ast.location,
'Type {0} cannot be a reference type, because it is a subtype of a non-reference type.'
.format(struct.getName()))
def setFieldOptions(self, ast, field):
'''@type ast: coda.compiler.ast.StructDef.Field
@type field: descriptors.StructType.Field'''
if ast.options:
optionHolder = field.getMutableOptions()
for option in ast.options:
self.setOption(optionHolder, option, descriptors.FieldOptions)
optionHolder.freeze()
    def setMethodOptions(self, ast, method):
        '''Apply method-level option declarations to a method descriptor.
        @type ast: coda.compiler.ast.StructDef.Method
        @type method: descriptors.StructType.Method'''
        if ast.options:
            optionHolder = method.getMutableOptions()
            for option in ast.options:
                self.setOption(optionHolder, option, descriptors.MethodOptions)
            optionHolder.freeze()
def setOption(self, optionHolder, option, optionClass):
field = optionHolder.descriptor().findField(option.name)
if not field:
bestWord = None
bestDist = 5
for field in optionHolder.descriptor().getFields():
dist = levenshtein(field.getName(), option.name)
if dist < bestDist:
bestDist = dist
bestWord = field.getName()
if bestWord:
self.errorReporter.errorAt(
option.location,
"Unknown option '{0}', did you mean '{1}'?".format(option.name, bestWord))
else:
self.errorReporter.errorAt(
option.location, "Unknown option '{0}'".format(option.name))
else:
ftype = field.getType()
if ftype.typeId() == types.TypeKind.MAP:
keyType = ftype.getKeyType()
if isinstance(keyType, types.CollectionType):
self.errorReporter.errorAt(
option.location,
"Invalid key type for option '{0}:{1}'.".format(option.name, option.scope))
valueType = ftype.getValueType()
scopeMap = field.getMutableValue(optionHolder)
if scopeMap is Object.EMPTY_MAP:
scopeMap = {}
field.setValue(optionHolder, scopeMap)
if option.scope in scopeMap:
self.errorReporter.errorAt(
option.location,
"Option '{0}:{1}' has already been set".format(option.name, option.scope))
else:
value = coerceValue(valueType, option.value)
if option.scope is None:
option.scope = ''
scopeMap[option.scope] = value
elif option.scope:
self.errorReporter.errorAt(
option.location, "Option '{0}' is not scoped".format(option.name))
else:
field.setValue(optionHolder, coerceValue(ftype, option.value))
def getType(self, typeAst, scope):
if isinstance(typeAst, coda.compiler.ast.TypeName):
ty = self.lookupTypeName(typeAst.name, scope, typeAst.location)
if ty is None:
return types.ERROR
if isinstance(ty, GenericType):
self.errorReporter.errorAt(
typeAst.location,
'Missing type parameters for type \'{0}\''.format(typeAst))
return ty
elif isinstance(typeAst, coda.compiler.ast.ModifiedType):
base = self.getType(typeAst.base, scope)
if not base or base is types.ERROR:
return types.ERROR
if base.typeId() != types.TypeKind.STRUCT:
self.errorReporter.errorAt(
typeAst.location, 'Type modifiers can only be applied to struct types')
return types.ERROR
ty = types.ModifiedType()
assert type(base) is not types.Type
ty.setElementType(base)
if typeAst.const:
ty.setConst(True)
if typeAst.shared:
ty.setShared(True)
return ty
elif isinstance(typeAst, coda.compiler.ast.SpecializedType):
args = []
for argAst in typeAst.args:
argType = self.getType(argAst, scope)
if argType is None or argType is types.ERROR:
return types.ERROR
args.append(argType)
assert isinstance(typeAst.base, coda.compiler.ast.TypeName)
genericType = self.lookupTypeName(
typeAst.base.name, scope, typeAst.base.location)
if genericType is None or genericType is types.ERROR:
return types.ERROR
if not isinstance(genericType, GenericType):
self.errorReporter.errorAt(
typeAst.location,
'Type \'{0}\' does not have type parameters'.format(
typeAst.base.location))
return types.ERROR
if len(args) != genericType.getNumArgs():
self.errorReporter.errorAt(
typeAst.location,
'Incorrect number of type parameters for \'{0}\': ' +
'found {1}, expected {2}'.format(
typeAst.base.name, len(args), genericType.getNumArgs()))
return types.ERROR
ty = genericType(args) #.freeze()
if ty is types.ERROR:
return ty
elif ty.typeId() == types.TypeKind.LIST:
elemType = self.stripModifiers(ty.getElementType())
if elemType.typeId() not in ALLOWED_TYPES.LIST_ELEMENT:
self.errorReporter.errorAt(typeAst.location,
'Lists of type \'{0}\' are not permitted '.format(ty.getElementType().getName()))
elif ty.typeId() == types.TypeKind.SET:
elemType = self.stripModifiers(ty.getElementType())
if elemType.typeId() not in ALLOWED_TYPES.SET_ELEMENT:
self.errorReporter.errorAt(typeAst.location,
'Sets of type \'{0}\' are not permitted '.format(ty.getElementType().getName()))
elif ty.typeId() == types.TypeKind.MAP:
keyType = self.stripModifiers(ty.getKeyType())
valueType = self.stripModifiers(ty.getValueType())
if keyType.typeId() not in ALLOWED_TYPES.MAP_KEY:
self.errorReporter.errorAt(typeAst.location,
'Map keys of type \'{0}\' are not permitted '.format(ty.getElementType().getName()))
elif valueType.typeId() not in ALLOWED_TYPES.MAP_VALUE:
self.errorReporter.errorAt(typeAst.location,
'Map values of type \'{0}\' are not permitted '.format(
ty.getElementType().getName()))
return self.types.setdefault(ty.key(), ty)
else:
self.errorReporter.errorAt(
typeAst.location, 'Unknown type \'{0}\''.format(typeAst))
return types.ERROR
def stripModifiers(self, ty):
while isinstance(ty, types.ModifiedType):
ty = ty.getElementType()
return ty
    def lookupTypeName(self, name, scope, location):
        '''Look up a type by name. Report an error and return None if the
        type name could not be found.
        Searches qualified names from the innermost enclosing scope
        outward, suggests the closest-matching known typename on failure,
        and enforces cross-file visibility for declared types.
        @type name: string
        @type scope: typemixins.DeclTypeMixin'''
        ty = self.typenames.get(name)
        if ty is None and scope:
            # Try scope-qualified lookups, shortening the scope prefix one
            # component at a time.
            prefix = scope.getFullName()
            while True:
                ty = self.typenames.get(prefix + '.' + name)
                if ty is not None: break
                dot = prefix.rfind('.')
                if dot < 0:
                    break
                prefix = prefix[:dot]
        if ty is None:
            # Build a "did you mean" suggestion: suffix matches beat
            # edit-distance matches (distance must be < 5).
            bestWord = None
            bestDist = 5
            for typename in self.typenames.keys():
                if typename.endswith('.' + name):
                    bestWord = typename
                    bestDist = 0
                    break
                elif typename.endswith(name):
                    bestWord = typename
                    bestDist = 1
                else:
                    dist = levenshtein(typename, name)
                    if dist < bestDist:
                        bestDist = dist
                        bestWord = typename
            if bestWord:
                self.errorReporter.errorAt(
                    location,
                    'Unknown type \'{0}\', did you mean \'{1}\'?'.format(
                        name, bestWord))
            else:
                self.errorReporter.errorAt(
                    location, 'Unknown type \'{0}\''.format(name))
            return None
        if scope and isinstance(ty, types.DeclType):
            # The file declaring the type must have been included by the
            # file doing the lookup (same file is always allowed).
            srcPath = self.getFilePathForType(scope)
            dstPath = self.getFilePathForType(ty)
            if dstPath != srcPath and dstPath not in self.visibleFiles[srcPath]:
                self.errorReporter.errorAt(
                    location,
                    'Type \'{0}\' is defined in file \'{1}\', which was not included by \'{2}\'.'.format(
                        name, dstPath, srcPath))
        return ty
def getFilePathForType(self, ty):
if isinstance(ty, FileScope):
file = ty.getFile()
return os.path.join(file.getDirectory(), file.getName())
if isinstance(ty, types.DeclType):
topLevelType = ty
while topLevelType.getEnclosingType() is not None:
topLevelType = topLevelType.getEnclosingType()
file = topLevelType.getFile()
if file:
return os.path.join(file.getDirectory(), file.getName())
return None
def defineTypeName(self, name, sym, location):
assert location
if name in self.typenames:
self.errorReporter.errorAt(location,
"Typename '{0}' already defined".format(name))
return
self.typenames[name] = sym
def coerceValue(toType, value):
    '''Convert an AST literal node into the plain Python value required by
    the descriptor type `toType`.

    Raises TypeError when the literal kind does not match the target type;
    float and bytes constants are not yet implemented.'''
    kind = toType.typeId()
    if kind == types.TypeKind.BOOL:
        if isinstance(value, coda.compiler.ast.BooleanValue):
            return value.value
        raise TypeError('Cannot convert {0} to boolean'.format(value))
    elif kind == types.TypeKind.INTEGER:
        if isinstance(value, coda.compiler.ast.IntegerValue):
            return value.value
        raise TypeError('Cannot convert {0} to int'.format(value))
    elif kind in (types.TypeKind.FLOAT, types.TypeKind.DOUBLE):
        assert False, "Float constants not implemented"
    elif kind == types.TypeKind.STRING:
        if isinstance(value, coda.compiler.ast.StringValue):
            return value.value
        raise TypeError('Cannot convert {0} to string'.format(value))
    elif kind == types.TypeKind.BYTES:
        assert False, "Bytes constants not implemented"
    elif kind == types.TypeKind.LIST:
        if isinstance(value, coda.compiler.ast.ListValue):
            # Element-wise coercion; stored immutably as a tuple.
            return tuple(coerceValue(toType.getElementType(), el)
                         for el in value.value)
        raise TypeError('Cannot convert {0} to list'.format(value))
    raise TypeError('Cannot convert {0} to {1}'.format(value, str(toType)))
def levenshtein(s1, s2):
    """Compute the Levenshtein edit distance between two strings using a
    rolling single-row dynamic program (O(len(s1)*len(s2)) time,
    O(min(len)) space)."""
    # Keep the shorter string as s2 so the row stays small.
    if len(s1) < len(s2):
        s1, s2 = s2, s1
    if not s2:
        return len(s1)
    prev_row = list(range(len(s2) + 1))
    for i, ch1 in enumerate(s1):
        curr_row = [i + 1]
        for j, ch2 in enumerate(s2):
            # prev_row[j+1]: row above (insertion); curr_row[j]: cell to
            # the left (deletion); prev_row[j]: diagonal (substitution).
            substitution = prev_row[j] + (ch1 != ch2)
            insertion = prev_row[j + 1] + 1
            deletion = curr_row[j] + 1
            curr_row.append(min(insertion, deletion, substitution))
        prev_row = curr_row
    return prev_row[-1]
| {
"repo_name": "viridia/coda",
"path": "libs/coda/compiler/analyzer.py",
"copies": "1",
"size": "28667",
"license": "apache-2.0",
"hash": 7963442063808145000,
"line_mean": 35.1957070707,
"line_max": 101,
"alpha_frac": 0.6524226462,
"autogenerated": false,
"ratio": 3.7268590743629746,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9821404795450055,
"avg_score": 0.011575385022584091,
"num_lines": 792
} |
"""A standardized logger.
The log level can be configured using the environment variable
CLOUDLY_LOG_LEVEL. Otherwise, defaults to 'info'.
"""
import os
import logging
from logging import DEBUG, INFO, WARNING, ERROR, CRITICAL # noqa
FORMAT = "%(asctime)s] %(levelname)s %(module)s %(funcName)s: %(message)s"
DEFAULT_LOGFILE = "cloudly.log"

# Resolve the default log level from the CLOUDLY_LOG_LEVEL environment
# variable, falling back to 'info' (with a warning) on unknown names.
log_level_string = os.environ.get("CLOUDLY_LOG_LEVEL", "info")

log_level_map = {
    'debug': DEBUG,
    'info': INFO,
    'warning': WARNING,
    'error': ERROR,
    'critical': CRITICAL
}

try:
    default_log_level = log_level_map[log_level_string]
except KeyError:
    # BUGFIX/modernization: `except KeyError, exception:` is Python-2-only
    # syntax, and the bound exception was never used.
    print("WARNING: Log level {!r} not supported. "
          "Using 'info' instead.".format(log_level_string))
    default_log_level = INFO

# Cache of already-configured loggers, keyed by logger name.
loggers = {}
def init(name=None, log_level=default_log_level):
    """Return a logger with the default format, creating and caching it
    on first use so repeated calls never attach duplicate handlers."""
    logger = loggers.get(name)
    if logger is None:
        logger = logging.getLogger(name)
        configure_logger(logger, log_level)
        loggers[name] = logger
    return logger
def configure_logger(logger, log_level, log_to_file=False):
    """Configure `logger` with the given level, a console handler, and a
    file handler (writing to DEFAULT_LOGFILE) when log_to_file is True.
    All handlers share the module-wide FORMAT and the given level."""
    logger.setLevel(log_level)
    formatter = logging.Formatter(FORMAT)

    def _attach(handler):
        # Every handler gets the same level and formatter.
        handler.setLevel(log_level)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    _attach(logging.StreamHandler())
    if log_to_file:
        _attach(logging.FileHandler(DEFAULT_LOGFILE))
| {
"repo_name": "ooda/cloudly",
"path": "cloudly/logger.py",
"copies": "1",
"size": "1813",
"license": "mit",
"hash": -27608756042359720,
"line_mean": 26.8923076923,
"line_max": 74,
"alpha_frac": 0.6613348042,
"autogenerated": false,
"ratio": 3.7458677685950414,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49072025727950414,
"avg_score": null,
"num_lines": null
} |
## AST and Parser
# Local Imports
import Token
from Lexer import *
## AST
#
# Abstract Syntax Tree that represents the parse tree of the
# parsed Tokens
class AST(object):
    """Base class for all abstract-syntax-tree nodes."""
    pass
## All
#
# Special ALL keyword. Means all objects in current state with
# type arg
class All(AST):
    """AST node for the ALL keyword: all objects in the current state
    whose type matches `arg`."""
    ## Constructor
    def __init__(self, arg):
        ## @var arg
        # The type given as a String argument to keyword ALL.
        self.arg = arg
## Python
#
# Special PYTHON keyword. Allows user limited ability to inline
# python code
class Python(AST):
    """AST node for the PYTHON keyword: a snippet of inline python code
    supplied by the user."""
    ## Constructor
    def __init__(self, code):
        ## @var code
        # The inline python source as a String.
        self.code = code
## Type
#
# Special TYPE keyword. Means get the type of arg
class Type(AST):
    """AST node for the TYPE keyword: yields the type of `arg`."""
    ## Constructor
    def __init__(self, arg):
        ## @var arg
        # The object name as a String whose type is desired
        self.arg = arg
## State
#
# Special State keyword. Used for accessing current state
class State(AST):
    """AST node for the STATE keyword: a query against the current state
    with the given argument list."""
    ## Constructor
    def __init__(self, args):
        ## @var args
        # Args node holding the arguments of the state query.
        self.args = args
## Digit
#
# Represents a decimal digit
class Digit(AST):
    """A single decimal digit."""
    ## Constructor
    def __init__(self, value):
        ## @var value
        # Contains the single decimal digit as a String
        self.value = value
## Int
#
# An integer which is represented as a list of digits
class Int(AST):
    """An integer, represented as a list of digits plus a sign."""
    ## Constructor
    def __init__(self, sign):
        ## @var digits
        # List of digits that comprise the integer (left-to-right)
        self.digits = []
        ## @var sign
        # True for positive, False for negative.
        self.sign = sign
## Float
#
# Represented as an two integers (separated by a dot syntactically)
class Flt(AST):
    """A float: two integers separated syntactically by a dot, plus a
    sign."""
    ## Constructor
    def __init__(self, left, right, sign):
        ## @var left
        # The integer to the left of the dot
        self.left = left
        ## @var right
        # The integer to the right of the dot
        self.right = right
        ## @var sign
        # True for positive, False for negative.
        self.sign = sign
## Literal
#
# A string literal, marked in the custom language by quotes
class Literal(AST):
    """A string literal, marked in the source language by quotes."""
    ## Constructor
    def __init__(self, name):
        ## @var name
        # holds the literal value
        self.name = name
## Boolean
#
# Boolean statement that takes the form:
# e1 comp e2
# e1 and e2 are expressions which can be
# variables, literals, or keyword phrases
# comp is a comparative operator. Currently
# only '=' is supported
class Boolean(AST):
    """A single comparison of the form `e1 op e2`, where e1/e2 are
    expressions and op is a comparison token (or a PYTHON token for a
    bare expression with e2 == None)."""
    ## Constructor
    def __init__(self, e1, op, e2):
        ## @var e1
        # Expression to left of comparative operator
        self.e1 = e1
        ## @var op
        # Comparative operator token
        ## @var token
        # same as op
        self.token = self.op = op
        ## @var e2
        # Expression to right of comparative operator (may be None)
        self.e2 = e2
## Boolean Expression
#
# A list of Boolean Statements separated by either &&s or ||s
class BoolExpr(AST):
    """A boolean expression: a left operand optionally joined to a right
    sub-expression by && or ||."""
    ## Constructor
    def __init__(self, bound, left, op, right):
        ## @var left
        # Boolean statement to the left of the operator
        self.left = left
        ## @var op
        # Either && or || (None when there is no right side)
        ## @var token
        # same as op
        self.token = self.op = op
        ## @var right
        # Boolean expression to the right of the operator (may be None)
        self.right = right
        ## @var bound
        # Boolean indicating whether or not this expression is parenthesized
        self.bound = bound
## Arguments
#
# A list of the arguments to an action. For example, the arguments
# to grasp is a list of one object: [obj]
class Args(AST):
    """The list of arguments to an action, e.g. grasp's arguments are
    the single-element list [obj]."""
    ## Constructor
    def __init__(self):
        ## @var children
        # list of arguments
        self.children = []
## Action
#
# An action and its arguments
class Act(AST):
    """An action together with its arguments."""
    ## Constructor
    def __init__(self, var, args):
        ## @var var
        # Name of the action (i.e grasp, release)
        self.var = var
        ## @var args
        # Args node listing the action's arguments
        self.args = args
## Actions
#
# A List of actions
class Acts(AST):
    """A list of actions."""
    ## Constructor
    def __init__(self):
        ## @var children
        # List of actions
        self.children = []
## Cause
#
# One causal statement. Act is the intention of the sequence of
# actions, acts
class Caus(AST):
    """One causal statement: `act` is the intention realized by the
    sequence of actions `acts`."""
    ## Constructor
    def __init__(self, act, acts):
        ## @var act
        # The intention
        self.act = act
        ## @var acts
        # The sequence of actions that reduce to act
        self.acts = acts
## Conditional
#
# Conditional boolean statement that means the causal relation only
# holds if the boolean statement is true
class Cond(AST):
    """A conditional guard: the causal relation it precedes only holds
    when the boolean expression is true."""
    ## Constructor
    def __init__(self, boolean):
        ## @var boolean
        # boolean guard to conditional
        self.boolean = boolean
## Statement
#
# A statement is optionally a conditional followed by a causal
# relation
class Stmt(AST):
    """A statement: an optional conditional followed by a causal
    relation."""
    ## Constructor
    def __init__(self, cond, caus):
        ## @var cond
        # Conditional guard (a NoCond instance when absent)
        self.cond = cond
        ## @var caus
        # Causal relation
        self.caus = caus
## Statements
#
# A List of statements
class Stmts(AST):
    """A list of statements; the root of a parsed program."""
    ## Constructor
    def __init__(self):
        ## @var children
        # List of statements
        self.children = []
## Variable
#
# Variables are arguments to actions. They can be referenced in
# conditionals or in causal statements, but all derive from
# action arguments
class Var(AST):
    """A variable. Variables are arguments to actions; they can be
    referenced in conditionals or causal statements, but all derive from
    action arguments."""
    ## Constructor
    def __init__(self, token):
        ## @var token
        # token representing the variable
        self.token = token
        ## @var value
        # the variable's value taken from the token (could be empty)
        self.value = token.value
## No Conditional
#
# Represents the absence of a conditional
class NoCond(AST):
    """Marker node representing the absence of a conditional."""
    pass
## Parser
#
# Converts a string of tokens into an AST
class Parser(object):
    """Recursive-descent parser.

    Consumes tokens from a Lexer and builds an AST rooted at a Stmts
    node; the grammar rule handled by each method is documented on it.
    """

    ## Constructor
    def __init__(self, lexer):
        ## @var lexer
        # Lexer that tokenizes input
        self.lexer = lexer
        ## @var current_token
        # the token currently being examined
        self.current_token = self.lexer.get_next_token()
        ## @var paren_balance
        # bookkeeping count of open parentheses
        self.paren_balance = 0

    ## Parser Error
    #
    # Alerts the user of invalid syntax indicated by a
    # token that cannot be parsed into an AST object
    def error(self, expected):
        raise Exception('Invalid syntax: {expected} was expected, not {token}'.format(
            token = self.current_token,
            expected = expected
        ))

    ## Eat
    #
    # Consume the current token if it matches, else report a syntax error
    def eat(self, token_type):
        if self.current_token.type == token_type:
            self.current_token = self.lexer.get_next_token()
        else:
            self.error(token_type)

    ## Program
    #
    # program -> RULES LCURLY stmts RCURLY
    def program(self):
        self.eat(RULES)
        self.eat(LCURLY)
        node = self.stmts()
        self.eat(RCURLY)
        return node

    ## Stmts
    #
    # stmts -> stmt
    #        | stmt SEMI stmts
    def stmts(self):
        root = Stmts()
        root.children.append(self.stmt())
        while self.current_token.type == SEMI:
            self.eat(SEMI)
            root.children.append(self.stmt())
        return root

    ## Stmt
    #
    # stmt -> cond caus
    def stmt(self):
        # Keyword arguments evaluate left-to-right: cond first, then caus.
        return Stmt(cond=self.cond(), caus=self.caus())

    ## Cond
    #
    # cond -> IF LPAREN bools RPAREN COLON
    #       | empty
    def cond(self):
        if self.current_token.type != IF:
            return self.empty()
        self.eat(IF)
        self.eat(LPAREN)
        guard = self.bools()
        self.eat(RPAREN)
        self.eat(COLON)
        return Cond(boolean=guard)

    ## Caus
    #
    # caus -> act CAUSES acts
    def caus(self):
        intention = self.act()
        self.eat(CAUSES)
        sequence = self.acts()
        return Caus(act=intention, acts=sequence)

    ## Acts
    #
    # acts -> act COMMA acts
    #       | act
    def acts(self):
        root = Acts()
        root.children.append(self.act())
        while self.current_token.type == COMMA:
            self.eat(COMMA)
            root.children.append(self.act())
        return root

    ## Act
    #
    # act -> var LPAREN args RPAREN
    def act(self):
        name = self.var()
        self.eat(LPAREN)
        arguments = self.args()
        self.eat(RPAREN)
        return Act(var=name, args=arguments)

    ## Single argument
    #
    # arg -> QUOTE var QUOTE | STATE LPAREN args RPAREN | var
    # (factored out of args(), which previously duplicated this logic)
    def _parse_arg(self):
        if self.current_token.type == QUOTE:
            self.eat(QUOTE)
            node = Literal(name=self.var())
            self.eat(QUOTE)
        elif self.current_token.type == STATE:
            self.eat(STATE)
            self.eat(LPAREN)
            node = State(args=self.args())
            self.eat(RPAREN)
        else:
            node = self.var()
        return node

    ## Args
    #
    # args -> arg COMMA args
    #       | arg
    def args(self):
        root = Args()
        root.children.append(self._parse_arg())
        while self.current_token.type == COMMA:
            self.eat(COMMA)
            root.children.append(self._parse_arg())
        return root

    ## Bools
    #
    # bools -> boolean AND bools
    #        | boolean OR bools
    #        | boolean
    #        | (bools) AND bools
    #        | (bools) OR bools
    #        | (bools)
    def bools(self):
        if self.current_token.type == LPAREN:
            self.eat(LPAREN)
            left = self.bools()
            self.eat(RPAREN)
            bound = True
        else:
            left = self.boolean()
            bound = False
        token = None
        right = None
        # BUGFIX: the unparenthesized branch previously compared against
        # the string literals 'AND'/'OR' instead of the token-type
        # constants used in the parenthesized branch, so conjunctions
        # outside parentheses were silently dropped.
        if self.current_token.type == AND:
            token = self.current_token
            self.eat(AND)
            right = self.bools()
        elif self.current_token.type == OR:
            token = self.current_token
            self.eat(OR)
            right = self.bools()
        return BoolExpr(bound=bound, left=left, op=token, right=right)

    ## Boolean
    #
    # boolean -> expr comparison expr
    # boolean -> expr
    def boolean(self):
        left = self.expr()
        comparisons = (EQUALS, NOTEQUAL, LESSTHAN, GREATERTHAN,
                       GREATEREQUAL, LESSEQUAL)
        if self.current_token.type in comparisons:
            token = self.current_token
            self.eat(token.type)
            right = self.expr()
        else:
            # No comparison operator: a bare expression evaluated as
            # python truth; marked with a synthetic PYTHON token.
            token = Token(PYTHON, PYTHON)
            right = None
        return Boolean(e1=left, op=token, e2=right)

    ## Expr
    #
    # expr -> var
    #       | ALL LPAREN var RPAREN
    #       | TYPE LPAREN var RPAREN
    #       | QUOTE var QUOTE
    #       | LBRACK args RBRACK
    def expr(self):
        kind = self.current_token.type
        if kind == ALL:
            self.eat(ALL)
            self.eat(LPAREN)
            node = All(arg=self.var())
            self.eat(RPAREN)
        elif kind == TYPE:
            self.eat(TYPE)
            self.eat(LPAREN)
            node = Type(arg=self.var())
            self.eat(RPAREN)
        elif kind == QUOTE:
            self.eat(QUOTE)
            node = Literal(name=self.var())
            self.eat(QUOTE)
        elif kind == LBRACK:
            self.eat(LBRACK)
            node = self.args()
            self.eat(RBRACK)
        else:
            node = self.var()
        return node

    ## Var
    #
    # variable -> ID
    #           | integer
    #           | integer DOT integer (float)
    #           | DOT integer (float)
    #           | STATE LPAREN args RPAREN
    #           | PYTHON
    def var(self):
        positive = True
        if self.current_token.type == DASH:
            self.eat(DASH)
            positive = False
        if self.current_token.type == INTEGER:
            whole = self.integer()
            if self.current_token.type == DOT:
                self.eat(DOT)
                node = Flt(left=whole, right=self.integer(), sign=positive)
            else:
                node = whole
                node.sign = positive
        elif self.current_token.type == DOT:
            # Float with no leading digits, e.g. ".5" -- empty integer part.
            self.eat(DOT)
            node = Flt(left=Int(sign=True), right=self.integer(), sign=positive)
        elif self.current_token.type == STATE:
            self.eat(STATE)
            self.eat(LPAREN)
            node = State(args=self.args())
            self.eat(RPAREN)
        elif self.current_token.type == PYTHON:
            node = Python(code=self.current_token.value)
            self.eat(PYTHON)
        else:
            node = Var(self.current_token)
            self.eat(ID)
        return node

    ## Integer
    #
    # integer -> INTEGER integer
    #          | INTEGER
    def integer(self):
        root = Int(sign=True)
        while self.current_token.type == INTEGER:
            root.digits.append(Digit(self.current_token.value))
            self.eat(INTEGER)
        return root

    ## Empty
    #
    # empty ->
    def empty(self):
        return NoCond()

    ## Parse Program
    #
    # Actually runs the parser on the input
    def parse(self):
        node = self.program()
        if self.current_token.type != EOF:
            # BUGFIX: error() requires an `expected` argument; the old
            # zero-argument call raised a TypeError instead of the
            # intended syntax error.
            self.error(EOF)
        return node
"repo_name": "jhomble/electron435",
"path": "python_causal_compiler/compiler/Parser.py",
"copies": "1",
"size": "12555",
"license": "mit",
"hash": 7876241018276855000,
"line_mean": 19.9266666667,
"line_max": 80,
"alpha_frac": 0.6434090004,
"autogenerated": false,
"ratio": 2.841140529531568,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.867424655519078,
"avg_score": 0.06206059494815769,
"num_lines": 600
} |
"""A stanza queue to prevent race conditions and other things.
"""
from twisted.internet import task
class StanzaQueue(object):
    """Queues stanzas per (room, sender) pair so a new stanza is held
    back while a previous one for the same pair is still pending,
    preventing race conditions in stanza processing.
    """
    def __init__(self, cb_presence = None, cb_groupchat = None):
        self.started = False
        # Keys 'room+from' (both lowercased) currently being processed.
        self.stz_pending = {}
        # Held-back stanzas: dicts with 'room' and 'stz' keys.
        self.delayed_queue = []
        self.delayed_queue_call = None
        self.onPresence = cb_presence
        self.onGroupChat = cb_groupchat
        self.onIqAdmin = None

    def start(self):
        """Begin draining the delayed queue once per second (idempotent)."""
        if not self.started:
            self.delayed_queue_call = task.LoopingCall(self._handleDelayedQueue)
            self.delayed_queue_call.start(1)
            self.started = True

    def _handleDelayedQueue(self):
        """Dispatch queued stanzas whose (room, sender) pair is no longer
        pending; re-queue the rest for the next tick."""
        new_queue = []
        while len(self.delayed_queue) > 0:
            # NOTE(review): pop() drains from the tail, so stanzas are
            # dispatched in LIFO order within one tick -- confirm intended.
            d = self.delayed_queue.pop()
            # BUGFIX/modernization: dict.has_key() is Python-2-only; the
            # `in` operator is equivalent and portable.
            if d['room'].lower() + d['stz']['from'].lower() in self.stz_pending:
                # wait patiently
                new_queue.append(d)
            elif d['stz'].name == 'presence' and self.onPresence:
                self.onPresence(d['stz'])
            elif d['stz'].name == 'message' and self.onGroupChat:
                self.onGroupChat(d['stz'])
            elif d['stz'].name == 'iq' and self.onIqAdmin:
                self.onIqAdmin(d['stz'])
        self.delayed_queue = new_queue

    def doDelay(self, room, frm, stz):
        """Return True (and queue `stz`) if a stanza for (room, frm) is
        already pending; otherwise mark the pair pending and return
        False so the caller may process the stanza immediately."""
        key = room.lower() + frm.lower()
        if key in self.stz_pending:
            # add to delayed queue
            self.delayed_queue.append({'room': room, 'stz': stz})
            return True
        self.stz_pending[key] = True
        return False

    def _delStzPending(self, room, user):
        """Clear the pending marker for (room, user), if set."""
        self.stz_pending.pop(room.lower() + user.lower(), None)
| {
"repo_name": "twonds/palaver",
"path": "palaver/stanzaqueue.py",
"copies": "1",
"size": "1854",
"license": "mit",
"hash": -7149211584615793000,
"line_mean": 31.5263157895,
"line_max": 84,
"alpha_frac": 0.5453074434,
"autogenerated": false,
"ratio": 3.715430861723447,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.47607383051234464,
"avg_score": null,
"num_lines": null
} |
"""A-Star (A*) Search is a search strategy where, starting at the root of a tree
of nodes to explore, each node's children are explored in order of a combination
of lowest movement cost, with cost accumulating per move and a heuristic cost.
Priority of exploration is given to those remaining with lowest combined cost.
"""
class Node:
    """A point in the search space, linked to its parent so the success
    path can be reconstructed after the search finishes."""

    def __init__(self, state, parent, movement_cost, heuristic_cost):
        """Store the state, a link to the parent node, the lowest known
        movement cost to reach this state, and its heuristic cost."""
        self.state = state
        # Kept for reconstructing the success path.
        self.parent = parent
        self.movement_cost = movement_cost
        self.heuristic_cost = heuristic_cost

    def cost(self):
        """Total cost: accumulated movement cost plus heuristic estimate."""
        return self.movement_cost + self.heuristic_cost
class Heap:
    """Priority queue of search nodes; extraction always returns the
    node with the lowest combined cost."""

    def __init__(self):
        """Start empty with a high-water mark of zero."""
        self.contents = []
        # Largest size ever reached -- an output statistic only.
        self.max = 0

    def insert(self, element):
        """Add an element, updating the high-water mark."""
        self.contents.append(element)
        self.max = max(self.max, len(self.contents))

    def update_costs(self, state, parent, movement_cost):
        """Lower the recorded movement cost (and reparent) any stored
        node for `state` that is now reachable more cheaply."""
        for node in self.contents:
            if node.state == state and node.movement_cost > movement_cost:
                node.movement_cost = movement_cost
                node.parent = parent

    def extract_min(self):
        """Remove and return the node with the lowest current cost."""
        cheapest = min(self.contents, key=lambda node: node.cost())
        self.contents.remove(cheapest)
        return cheapest
class Astar:
    """This class implements the A-Star (A*) Search strategy."""
    """Instantiate with a subject to search, an empty Heap to store nodes
    currently on the frontier of search space, an empty array to store nodes
    that have been explored, the current movement cost of state for accumulation
    and a variable to store a node with the goal state.
    """
    def __init__(self, subject):
        # subject must provide current_state, set_state, goal_state,
        # next_states, move_cost and heuristic_cost -- see search().
        self.subject = subject
        self.frontier = Heap()
        self.explored = []
        self.success_node = None # for reconstructing success path
        self.current_cost = 0
    """Executes the search strategy and returns a boolean indicating success."""
    def search(self):
        # add the initial state to the Heap
        initial_state = self.subject.current_state()
        initial_cost = self.subject.heuristic_cost()
        self.frontier.insert(Node(initial_state, None, 0, initial_cost))
        while self.frontier.contents:
            # remove the next item from the Heap and explore it
            node = self.frontier.extract_min()
            self.explored.append(node)
            # update the current cost for accumulation to children states
            self.current_cost = node.movement_cost
            self.subject.set_state(node.state)
            # if the node we are exploring matches the goal state, we are done
            if self.subject.current_state() == self.subject.goal_state():
                self.success_node = node
                return True
            # otherwise finish exploring the node by adding it's next states to
            # the frontier
            self._add_next_states(node)
        # if we have explored every node and not found the solution, we are done
        return False
    """Returns statistics describing the search."""
    def results(self):
        if self.success_node:
            # construct the success path by walking parent links back to
            # the root, then reversing into start-to-goal order
            path = [self.success_node]
            parent = self.success_node.parent
            while parent:
                path.append(parent)
                parent = parent.parent
            path.reverse()
        else:
            path = []
        costs = lambda x: (x.state, x.movement_cost, x.heuristic_cost, x.cost())
        return {
            "cost": self.current_cost,
            "path": [costs(node) for node in path],
            "path length": len(path),
            "number of visited nodes": len(self._visited_nodes()),
            "current frontier nodes": [costs(node) for node in self.frontier.contents],
            "number of frontier nodes": len(self.frontier.contents),
            "max number of frontier nodes": self.frontier.max
        }
    """Visited nodes includes explored nodes and nodes on the frontier."""
    def _visited_nodes(self):
        return self.explored + self.frontier.contents or [] # must return a list
    """Adds states that can be reached from the current state to the Heap."""
    def _add_next_states(self, parent):
        for state in self.subject.next_states():
            # movement cost accumulates from the node being expanded
            movement_cost = self.current_cost
            # only add states that have never been visited
            if [node.state for node in self._visited_nodes()].count(state) == 0:
                movement_cost += self.subject.move_cost(self.subject.current_state(), state)
                heuristic_cost = self.subject.heuristic_cost(state)
                self.frontier.insert(Node(state, parent, movement_cost, heuristic_cost))
            # though also check to update the cost of any state we can now reach
            # at lower cost
            elif [node.state for node in self.frontier.contents].count(state) == 1:
                movement_cost += self.subject.move_cost(self.subject.current_state(), state)
                self.frontier.update_costs(state, parent, movement_cost)
| {
"repo_name": "PhilipCastiglione/learning-machines",
"path": "uninformed_search/algorithms/astar.py",
"copies": "1",
"size": "6146",
"license": "mit",
"hash": 7661744018113273000,
"line_mean": 41.6805555556,
"line_max": 92,
"alpha_frac": 0.6332574032,
"autogenerated": false,
"ratio": 4.2009569377990434,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0036073080827908302,
"num_lines": 144
} |
"astar.py - A* pathfinding module for Pyro"
# This module assumes movement on a grid, rather than an arbitrary graph.
from util import *
from heapq import heappush, heappop, heapify
from math import sqrt
class NodeList(BASEOBJ):
    "Indexed collection of grid nodes, kept as a min-heap on f = g + h."
    def __init__(self, name="nodelist"):
        # nodes: heap of [f, g, h, x, y, (parent_x, parent_y)] entries;
        # idx: (x, y) -> node for O(1) membership and lookup.
        self.nodes, self.idx = [], {}
        self.name = name
    def has(self, x, y):
        "Return whether a node for (x, y) is present."
        # BUGFIX/modernization: dict.has_key() is Python-2-only.
        return (x, y) in self.idx
    def add(self, x, y, cost, h, parent_x, parent_y):
        "Add a node for (x, y); raises ValueError if one already exists."
        if self.has(x, y):
            raise ValueError
        node = [cost+h, cost, h, x, y, (parent_x, parent_y)]
        heappush(self.nodes, node)
        self.idx[(x, y)] = node
    def remove(self, x, y):
        "Remove the node stored for (x, y)."
        self.nodes = [n for n in self.nodes if (n[3], n[4]) != (x, y)]
        # BUGFIX: filtering the backing list can break the heap invariant;
        # restore it so later heappop calls stay correct.
        heapify(self.nodes)
        del self.idx[(x, y)]
    def pop(self):
        "Remove and return the node with the lowest f = g + h."
        node = heappop(self.nodes)
        del self.idx[(node[3], node[4])]
        return node
    def node(self, x, y):
        "Return the node stored for (x, y)."
        return self.idx[(x, y)]
    def path_from(self, x, y):
        "Return the path (list of (x, y) steps) from the start to (x, y)."
        node = self.node(x, y)
        path = []
        while node[5] != (None, None):
            path.insert(0, (x, y))
            x, y = node[5]
            node = self.node(x, y)
        return path
    def best_path_so_far(self):
        "Return the path to the closest node found (lowest heuristic)."
        lowest = 99999
        for n in self.nodes:
            if n[2] < lowest:
                lowest = n[2]
                x, y = n[3], n[4]
        return self.path_from(x, y)
def path(start_x, start_y, dest_x, dest_y, passable, max_length=99999):
    """A* search from (start_x, start_y) to (dest_x, dest_y) on a grid.

    Arguments:
        passable   -- function(x, y) -> bool, whether a node can be entered
        max_length -- give up once the cheapest open node costs more than this

    Returns a list of (x, y) steps excluding the start square; on failure
    returns the path to the closest node reached (lowest heuristic).
    """
    # `open_list` instead of `open`, which shadowed the builtin.
    open_list = NodeList("Open")
    # Chebyshev-distance heuristic (8-way movement).
    h = max(abs(dest_x - start_x), abs(dest_y - start_y))
    open_list.add(start_x, start_y, 0, h, None, None)
    closed = NodeList("Closed")
    while len(open_list.nodes) > 0:
        node = open_list.pop()
        node_cost_h, node_cost, node_h, node_x, node_y, node_parent = node
        if node_cost > max_length:
            # We've failed to find a short enough path; return the best we've got:
            break
        # Put the parent node in the closed set:
        closed.add(node_x, node_y, node_cost, node_h, node_parent[0], node_parent[1])
        # See if we're at the destination:
        if (node_x, node_y) == (dest_x, dest_y):
            # We found the path; return it:
            return closed.path_from(node_x, node_y)
        # Check adjacent nodes (range instead of Python-2-only xrange):
        for i in range(node_x - 1, node_x + 2):
            for j in range(node_y - 1, node_y + 2):
                dx, dy = i - node_x, j - node_y
                # Skip the current node:
                if (i, j) == (node_x, node_y):
                    continue
                # If this node is impassable, disregard it:
                if not passable(i, j):
                    continue
                # Calculate the heuristic:
                h = max(abs(dest_x - i), abs(dest_y - j))
                # Slightly penalize diagonal moves to discourage
                # superfluous wandering:
                if dx == 0 or dy == 0:
                    move_cost = 1
                else:
                    move_cost = 1.001
                cost = node_cost + move_cost
                # See if it's already in the closed set:
                if closed.has(i, j):
                    c = closed.node(i, j)
                    if cost < c[1]:
                        # Found a cheaper route to a closed node; reopen it.
                        open_list.add(i, j, cost, h, node_x, node_y)
                        closed.remove(i, j)
                else:
                    # Not closed; put it on the open list if it's not already:
                    if not open_list.has(i, j):
                        open_list.add(i, j, cost, h, node_x, node_y)
    # We ran out of open nodes (or exceeded max_length); pathfinding failed.
    # Do the best we can:
    return closed.best_path_so_far()
| {
"repo_name": "amirjabri/pyro",
"path": "astar.py",
"copies": "1",
"size": "4017",
"license": "mit",
"hash": 8904570144837521000,
"line_mean": 37.7920792079,
"line_max": 95,
"alpha_frac": 0.4934030371,
"autogenerated": false,
"ratio": 3.5205959684487294,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4513999005548729,
"avg_score": null,
"num_lines": null
} |
# AStar.py
# A* Search of a problem space.
# Ver 0.1, October 19, 2017.
# Usage:
# python3 AStar.py EightPuzzleWithHeuristics h_euclidean puzzle2a
import sys
from priorityq import PriorityQ
# Select the problem module, heuristic and initial state: defaults when run
# interactively / without arguments, otherwise from the command line.
# NOTE(review): the guard tests len(sys.argv) < 3 but the else branch reads
# sys.argv[3], so exactly three arguments raise IndexError — verify the
# intended usage is 4 args (script, problem, heuristic, puzzle).
if sys.argv == [''] or len(sys.argv) < 3:
    import EightPuzzleWithHeuristics as Problem
    CHOSEN_HEURISTIC = 'h_manhattan'
    INITIAL_STATE = Problem.CREATE_INITIAL_STATE()
    h_score_fn = Problem.HEURISTICS[CHOSEN_HEURISTIC] # scoring function
else:
    import importlib
    Problem = importlib.import_module(sys.argv[1])
    CHOSEN_HEURISTIC = sys.argv[2]
    initial_state_file = importlib.import_module(sys.argv[3])
    INITIAL_STATE = initial_state_file.CREATE_INITIAL_STATE()
    h_score_fn = Problem.HEURISTICS[CHOSEN_HEURISTIC] # scoring function
print("\nWelcome to A Star Search")
# Search bookkeeping shared by runAStar()/AStar():
COUNT = None    # number of states examined
BACKLINKS = {}  # state -> predecessor on the best known path
def runAStar():
    """Entry point: print the initial state, reset counters, run A*."""
    global COUNT, BACKLINKS
    initial_state = INITIAL_STATE
    print("Initial State:")
    print(initial_state)
    COUNT = 0
    BACKLINKS = {}
    AStar(initial_state)
    print(str(COUNT) + " states examined.")
def AStar(initial_state):
    """A* search from initial_state to a goal of the Problem module.

    Uses the module-level h_score_fn heuristic and unit step costs.
    On success prints the goal message and the solution path (via
    backtrace); on exhaustion prints a failure message.  Progress is
    reported every 32 expansions.  Updates globals COUNT and BACKLINKS.
    """
    global COUNT, BACKLINKS
    # Tracks the most efficient known predecessor of each state.
    BACKLINKS[initial_state] = None
    # Already-evaluated states.  CLOSED (list) is kept for ordered len()
    # reporting; closed_set gives O(1) membership tests — the original
    # scanned the whole list for every successor (accidental O(n^2)).
    # States are hashable: they are already used as dict keys above.
    CLOSED = []
    closed_set = set()
    # Currently discovered, not yet evaluated states.
    OPEN = PriorityQ()
    # Calculate F, G, H scores for the start state.
    initialize_scores(initial_state)
    # Only the initial node is known as of now.
    OPEN.insert(initial_state, F_SCORE[initial_state])
    while not OPEN.isEmpty():
        S = OPEN.deletemin()
        CLOSED.append(S)
        closed_set.add(S)
        if Problem.GOAL_TEST(S):
            print(Problem.GOAL_MESSAGE_FUNCTION(S))
            backtrace(S)
            return  # FOUND GOAL
        COUNT += 1
        if (COUNT % 32) == 0:
            # Periodic progress report.
            print("COUNT = " + str(COUNT))
            print("len(CLOSED)=" + str(len(CLOSED)))
        for op in Problem.OPERATORS:
            if op.precond(S):
                new_state = op.state_transf(S)
                if new_state in closed_set:
                    continue  # ignore already-evaluated neighbors
                # Tentative score of the neighbor (unit step cost).
                tentative_g_score = G_SCORE[S] + 1
                if new_state not in G_SCORE:  # default is infinity
                    BACKLINKS[new_state] = S  # first known path to new_state
                elif tentative_g_score >= G_SCORE[new_state]:
                    continue  # current path is not the best path to the neighbor
                else:
                    BACKLINKS[new_state] = S  # found a better path to new_state
                G_SCORE[new_state] = tentative_g_score
                F_SCORE[new_state] = G_SCORE[new_state] + h_score_fn(new_state)
                # Discovered a new state:
                if new_state not in OPEN:
                    OPEN.insert(new_state, F_SCORE[new_state])
    # Failure: goal_test never succeeded.
    print("COULD NOT FIND GOAL")
    return
def initialize_scores(start_state):
    # Seed the score maps for the start state:
    # g = 0 (no cost yet), h = heuristic estimate, f = g + h = h.
    # NOTE(review): assumes module-level G_SCORE/H_SCORE/F_SCORE dicts exist
    # after reset_Scores() runs — verify, they are not defined in this view.
    reset_Scores()
    G_SCORE[start_state] = 0
    H_SCORE[start_state] = h_score_fn(start_state)
    F_SCORE[start_state] = H_SCORE[start_state]
def reset_Scores():
    """Reset the global score maps so AStar can be run multiple times.

    Bug fix: the original assigned to *local* names, so the module-level
    G_SCORE/F_SCORE/H_SCORE dicts were never actually reset (and were never
    created at module scope at all).  Declaring them global makes the reset
    effective and defines them for initialize_scores()/AStar().
    """
    global G_SCORE, F_SCORE, H_SCORE
    G_SCORE = {}
    F_SCORE = {}
    H_SCORE = {}
def print_state_list(name, lst):
    """Print `name` followed by the comma-separated states in `lst`.

    Bug fix: the original indexed lst[-1] and raised IndexError on an empty
    list; str.join produces identical output for non-empty lists and a bare
    header line for an empty one.
    """
    print(name + " is now: " + ", ".join(str(s) for s in lst))
def backtrace(S):
    """Follow BACKLINKS from S back to the start, then print and return
    the solution path in start-to-goal order."""
    global BACKLINKS
    chain = []
    current = S
    while current:
        chain.append(current)
        current = BACKLINKS[current]
    chain.reverse()
    print("Solution path: ")
    for step in chain:
        print(step)
    return chain
def occurs_in(s1, lst):
    """Return True iff s1 compares equal to some element of lst."""
    return any(s1 == s2 for s2 in lst)
if __name__ == '__main__':
runAStar() | {
"repo_name": "vaibhavi-r/CSE-415",
"path": "Assignment3/AStarOld.py",
"copies": "1",
"size": "4298",
"license": "mit",
"hash": -3426961651673970000,
"line_mean": 27.2828947368,
"line_max": 88,
"alpha_frac": 0.5805025593,
"autogenerated": false,
"ratio": 3.4247011952191233,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4505203754519123,
"avg_score": null,
"num_lines": null
} |
# astar.py
# trying draft version
import time, sys, heapq
import numpy as np
import pygame as pg
from pg_part import *
#import cv2
from operator import attrgetter
from pygame.locals import *
''' source - red
wall - black
target - blue
path - green '''
# --------------------------------------------------------------------------------------- #
# ------------------------- class CELL ---------------------------- #
# --------------------------------------------------------------------------------------- #
class Cell(object):
    """One grid square: position, A* scores, wall flag and parent link."""
    def __init__(self, x, y, reachable):
        self.x = x
        self.y = y
        self.g = 0              # cost from the start
        self.h = 0              # heuristic estimate to the target
        self.f = 0              # combined priority score
        self.reachable = reachable  # False for walls
        self.parent = None      # cell we arrived from
    def cell_xy(self):
        """Return this cell's (x, y) coordinates as a tuple."""
        return (self.x, self.y)
    def cell_info(self):
        """Return a verbose description of every field of the cell."""
        return "({0},{1}), g = {2}, h= {3}, f= {4}, wall = {5}, parent = {6})".format(
            self.x, self.y, self.g, self.h, self.f, self.reachable, self.parent)
    def __str__(self):
        return "Cell({0},{1})".format(self.x, self.y)
# --------------------------------------------------------------------------------------- #
# ------------------------- class GRID ---------------------------- #
# --------------------------------------------------------------------------------------- #
class Grid(object):
    """Square board of Cell objects plus open/closed bookkeeping for A*."""
    def __init__(self, grid_size):
        self.grid_size = grid_size
        self.open_cells = []          # frontier, sorted externally by f
        self.closed_cells = set()     # already-expanded cells
        self.cells = []               # flat list, row-major
        self.cell_array = []          # numpy 2-D view of self.cells
    def init_grid(self, walls=[]):
        """Build the grid_size x grid_size cell array; coordinates listed
        in `walls` become unreachable cells."""
        self.walls = walls
        for row in xrange(self.grid_size):
            for col in xrange(self.grid_size):
                self.cells.append(Cell(row, col, (row, col) not in self.walls))
        self.cell_array = np.array(self.cells).reshape((self.grid_size, self.grid_size))
    def cell_from_xy(self, x, y):
        """Return the cell stored at grid position (x, y)."""
        return self.cell_array.item((x, y))
    def get_h(self, cell, target=Cell(10, 10, True)):
        """Manhattan-distance heuristic from `cell` to `target`."""
        return abs(cell.x - target.x) + abs(cell.y - target.y)
    def get_adjcells(self, cell):
        """Return the up-to-four orthogonal neighbours of `cell`."""
        neighbours = []
        last = self.grid_size - 1
        if cell.x > 0:
            neighbours.append(self.cell_array.item((cell.x - 1, cell.y)))
        if cell.x < last:
            neighbours.append(self.cell_array.item((cell.x + 1, cell.y)))
        if cell.y > 0:
            neighbours.append(self.cell_array.item((cell.x, cell.y - 1)))
        if cell.y < last:
            neighbours.append(self.cell_array.item((cell.x, cell.y + 1)))
        return neighbours
    def minimum_f_cell(self):
        """Return the open cell with the minimum f score (first on ties)."""
        return min(self.open_cells, key=lambda cell: cell.f)
# --------------------------------------------------------------------------------------- #
# ------------------------- other functions ---------------------------- #
# --------------------------------------------------------------------------------------- #
def retrace_path(target, closed_cells):
    """Walk parent links from `target` back to the start cell.

    Returns the cells on the path, target first.  Each visited cell is also
    removed from a local list copy of `closed_cells` (raising ValueError if
    a path cell is not present there, as before).
    """
    remaining = list(closed_cells)
    path = []
    cell = target
    # The start cell has parent None and is included in the path.
    while cell is not None:
        remaining.remove(cell)
        path.append(cell)
        cell = cell.parent
    return path
#for x in path:
# print x, x.parent
# --------------------------------------------------------------------------------------- #
# ------------------------- original A* function ---------------------------- #
# --------------------------------------------------------------------------------------- #
def process(start,target,grid,walls,screen):
    """ Original A* algorithm.

    Expands cells from `start` toward `target` on `grid`, drawing progress
    on `screen` (blue = expanded, red = frontier, green = final path).
    Returns the path as a list of cells (empty if none found).
    Note: uses a weighted score f = 2*h + g, so the search is weighted A*
    (faster, not guaranteed optimal).
    """
##    pg.init(); pg.display.set_caption("A* Maze")
##    #grid = creategrid(dim)
##    screen = pg.display.set_mode((1000,700))
##
##    screen.fill((255,255,255))
##    for r in xrange(1,100):
##        pg.draw.line(screen,(0,0,0),(r*10,0),(r*10,1000),1)
##        pg.draw.line(screen,(0,0,0),(0,r*10),(1000,r*10),1)
##    for wall in walls:
##        x,y = wall
##        rect = pg.Rect(x*10,y*10,10,10)
##        pg.draw.rect(screen,(0,0,0),rect)
##    pg.display.update()
    found_path = False
    route = []
    # Seed the start cell's scores and put it on the frontier.
    start.h = grid.get_h(start,target)
    start.g = 0
    start.f = 2*start.h+start.g
    grid.open_cells.append(start) # first push start to open_cells
    cells_processed = 1
    while(grid.open_cells != []): # if open_cells is not empty
        #
        # Below line is the sorting according to f and h
        # (y is the final tie-breaker so the order is deterministic).
        #
        grid.open_cells = sorted(grid.open_cells,key = attrgetter('f','h','y'))
        current_cell = grid.open_cells.pop(0)
        grid.closed_cells.add(current_cell) # add start to closed cells
        # `color` is computed but unused; the expansion is drawn blue.
        color = np.random.randint(0,256,3).tolist()
        draw_cell(screen,current_cell,(0,0,255))
        adj_cells = grid.get_adjcells(current_cell) # get the adjacent cells of start and put them in open cells
        if found_path == True:
            break
        for cell in adj_cells:
            if cell.cell_xy() == target.cell_xy() and found_path == False: # if cell is target, job done
                target.parent = current_cell
                grid.closed_cells.add(target)
                route = retrace_path(target,grid.closed_cells)
                # Repaint the recovered path in green.
                for cell in route:
                    draw_cell(screen,cell,(0,255,0))
                cells_processed += 1
                found_path = True
            elif cell not in grid.closed_cells and cell.reachable == True and found_path == False: # or check if cell in closed_cells or walls
                draw_cell(screen,cell,(255,0,0))
                cell.h = grid.get_h(cell,target) # update the values of cell
                cell.g = current_cell.g+1
                cell.f = 2*cell.h + cell.g
                if cell not in grid.open_cells: # if cell not in open_cells, add it, update parent
                    cells_processed += 1
                    grid.open_cells.append(cell)
                    cell.parent = current_cell
                else: # or update the already present one.
                    old_cell = grid.open_cells[grid.open_cells.index(cell)] # present is the cell already in open cells
                    if cell.f < old_cell.f: # if new.f < old.f, replace old.parent by new.parent
                        old_cell.parent = current_cell
        # Slow the animation down so the expansion is visible.
        pg.time.delay(50)
    # Python 2 print statement (this module is Python 2 only).
    print "cells_processed = ", cells_processed
    return route
def MainGui():
    """Interactive pygame loop: draw walls with the mouse, set source (s)
    and target (t), run the search with SPACE, clear with c, quit with ESC."""
    #walls = ((0, 5), (1, 0), (1, 1), (1, 5), (2, 3),(3, 1), (3, 2), (3, 5), (4, 1), (4, 4), (5, 1),(19,20),(20,19))
    walls = []
    #walls = ((0,1),(1,1),(1,0))
    grid = Grid(500)
    #grid.init_grid(walls)
    pg.init(); pg.display.set_caption("A* Maze")
    #grid = creategrid(dim)
    screen = pg.display.set_mode((1000,700))
    screen.fill((255,255,255))
    # Pre-draw any hard-coded walls (none by default).
    for wall in walls:
        x,y = wall
        rect = pg.Rect(x*10,y*10,10,10)
        pg.draw.rect(screen,(0,0,0),rect)
    draw_grid(screen)
    pg.display.update()
    start_track = False
    while True:
        events = pg.event.get()
        if start_track == False:
            for e in events:
                # Convert pixel position to a 10px grid square.
                # NOTE(review): relies on Python 2 integer division here;
                # under Python 3 this yields floats — verify before porting.
                mousex = (pg.mouse.get_pos()[0])/10
                mousey = (pg.mouse.get_pos()[1])/10
                if e.type == KEYDOWN:
                    if e.key == K_ESCAPE:
                        pg.quit()
                        sys.exit()
                    elif (e.key == K_s): # hold mouse and press s to set source
                        start = Cell(mousex,mousey,True)
                        draw_cell(screen,start,(255,0,0))
                    elif (e.key == K_t): # hold mouse and press t to set target
                        target = Cell(mousex,mousey,True)
                        draw_cell(screen,target,(0,0,255))
                    elif (e.key == K_SPACE): # press SPACE to find the track
                        start_track = True
                    elif (e.key == K_c): # press c to clear window
                        screen.fill((255,255,255))
                        draw_grid(screen)
                        pg.display.update()
                elif e.type == MOUSEBUTTONDOWN:
                    # Left button paints a wall square.
                    if pg.mouse.get_pressed()[0] == 1:
                        walls.append([mousex,mousey])
                        pg.draw.rect(screen,(0,0,0),(mousex*10,mousey*10,10,10))
        if start_track == True:
            # Rebuild the grid with the drawn walls and run the search.
            grid.init_grid(walls)
            path = process(start,target,grid,walls,screen)
            start_track = False
            # NOTE(review): start/target are deleted here, so pressing SPACE
            # again before setting new ones raises NameError — confirm intended.
            del start,target
            #grid.open_cells = [],grid.closed_cells = set()
            draw_grid(screen)
        pg.display.update()
if __name__ == '__main__':
    # Historical non-interactive test scaffolding kept for reference;
    # the interactive GUI is the only live entry point.
    #start = Cell(0,0,True)
    #test = Cell(5,5,True)
    #target = Cell(20,20,True)
#    walls = ((0, 5), (1, 0), (1, 1), (1, 5), (2, 3),(3, 1), (3, 2), (3, 5), (4, 1), (4, 4), (5, 1),(19,20),(20,19))
#    #walls = []
#    #walls = ((0,1),(1,1),(1,0))
#    grid = Grid(500)
#    grid.init_grid(walls)
    #path = process(start,target,grid,walls)
    #print path
    MainGui()
| {
"repo_name": "abidrahmank/MyRoughWork",
"path": "maze_solver/rough/astar.py",
"copies": "1",
"size": "10016",
"license": "mit",
"hash": -6322901798522394000,
"line_mean": 33.4192439863,
"line_max": 148,
"alpha_frac": 0.455471246,
"autogenerated": false,
"ratio": 3.7654135338345864,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9579178930284447,
"avg_score": 0.028341169910028014,
"num_lines": 291
} |
import time
import threading
import smbus
import struct
import sys
class AStar:
    """Driver for a Pololu A-Star robot controller over I2C (bus address 20).

    A background thread (_run) continuously mirrors state between this
    object's attributes and the controller's I2C buffer: it reads buttons,
    odometer and battery level, and writes LEDs, servos, buzzer notes and
    motor speeds.  Call close() to stop the thread and the motors.

    NOTE(review): the _lock* attributes are plain booleans polled in busy-wait
    loops, not real locks; they are not atomic across threads — consider
    threading.Lock if stricter synchronization is required (verify current
    usage tolerates the races).
    """
    def __init__(self):
        # I2C bus 1 (Raspberry Pi); the controller listens at address 20.
        self._bus = smbus.SMBus(1)
        # Major Python version, used to pick the right bytes conversion.
        self._version = sys.version_info.major
        # LED states (0 = off, 1 = on), written out by the I/O thread.
        self.ledYellow = 0
        self.ledGreen = 0
        self.ledRed = 0
        # Last button states read from the controller.
        self._buttonA = 0
        self._buttonB = 0
        self._buttonC = 0
        # Commanded motion (mm/s forward, radians/s turn).
        self._fwdSpeed = 0
        self._turnRate = 0
        self._lockSpeeds = False
        # Odometer pose (mm, mm, radians).
        self._x = 0
        self._y = 0
        self._phi = 0
        self._lockOdometer = False
        # Battery level in millivolts.
        self._batteryMV = 0
        self._lockBattery = False
        self._panServo = 0 # Servo is disabled by default
        self._tiltServo = 0 # Servo is disabled by default
        self._mastServo = 0 # Servo is disabled by default
        self._lockServos = False
        # Pending buzzer note string (written once, then cleared).
        self._notes = ''
        # Request an odometer reset on the first I/O cycle.
        self._resetOdometer = True
        self.run()
        # Wait to ensure we can read/write the buffer once before starting
        time.sleep(.05)
        # Print battery level
        print("RPB202")
        print("Battery level: " + str(round(self.getBatteryVolts(), 2)) + "V")
    def _read_unpack(self, address, size, format):
        """Reads data from the I2C bus."""
        # Select the buffer address, then read `size` bytes one at a time.
        self._bus.write_byte(20, address)
        time.sleep(0.0001)
        byte_list = [self._bus.read_byte(20) for _ in range(size)]
        if self._version == 3:
            # Python version 3
            return struct.unpack(format, bytes(byte_list))
        else:
            # Python version 2
            return struct.unpack(format, bytes(bytearray(byte_list)))
    def _write_pack(self, address, format, *data):
        """Writes data to the I2C bus."""
        if self._version == 3:
            # Python version 3
            data_array = list(struct.pack(format, *data))
        else:
            # Python version 2
            data_array = map(ord, list(struct.pack(format, *data)))
        self._bus.write_i2c_block_data(20, address, data_array)
        time.sleep(0.0001)
    def close(self):
        """Stops the I2C communication with the A-Star controller. This method
        also stops the motors and turns off the A-Star LEDs."""
        # Stop the running thread
        self._active = False
        # Stop the motors
        self.setSpeeds(0, 0)
        # Write the motors speeds directly to the I2C bus
        self._write_pack(6, 'hh', 0, 0)
        # Turn LEDs off
        self.setYellowLED(0)
        self.setGreenLED(0)
        self.setRedLED(0)
        # Write the LED values directly to the I2C bus
        self._write_pack(0, 'BBB', 0, 0, 0)
    def run(self):
        """Starts continuous I2C communication with A-Star controller in a
        dedicated thread."""
        self._active = True
        th = threading.Thread(target = self._run, args = [])
        th.start()
    def _run(self):
        """Runs continuous I2C communication with A-Star controller. Runs as
        long as AStar._active attribute is True. Call AStar.close() to stop the
        thread."""
        while self._active:
            try:
                # Read from buffer
                # Buttons
                self._buttonA, self._buttonB, self._buttonC = \
                    self._read_unpack(3, 3, '???')
                # Odometer
                self._lockOdometer = True
                self._x, self._y, phi = self._read_unpack(10, 6, 'hhh')
                # Convert phi reading from 1/1000 of radians to radians
                self._phi = phi / 1000.
                self._lockOdometer = False
                # Battery level
                self._lockBattery = True
                self._batteryMV = self._read_unpack(17, 2, 'H')[0]
                self._lockBattery = False
                # Write to buffer
                # Reset odometer on start-up
                if self._resetOdometer:
                    self._resetOdometer = False
                    self._write_pack(16, 'B', 1)
                    time.sleep(.02)
                else:
                    self._write_pack(16, 'B', 0)
                # LEDs
                self._write_pack(0, 'BBB', self.ledYellow, self.ledGreen, \
                    self.ledRed)
                # Servos
                self._lockServos = True
                self._write_pack(34, 'HHH', self._panServo, self._tiltServo, \
                    self._mastServo)
                self._lockServos = False
                # Notes
                if self._notes != "":
                    self._write_pack(19, 'B15s', 1, self._notes.encode('ascii'))
                    self._notes = ""
                # Motors (turn rate in 1/1000 of radians / s)
                self._lockSpeeds = True
                turnRate = int(self._turnRate * 1000)
                self._write_pack(6, 'hh', self._fwdSpeed, turnRate)
                self._lockSpeeds = False
            except IOError:
                # Handle I2C communication error
                # NOTE(review): re-raising here ends the thread without ever
                # reaching self.close() below, so motors/LEDs stay in their
                # last state on an I2C failure — confirm this is intended.
                raise IOError("IOError in AStar class")
        self.close()
    def buttonAIsPressed(self):
        """Returns True if the A-Star button A is pressed, False otherwise."""
        return self._buttonA
    def buttonBIsPressed(self):
        """Returns True if the A-Star button B is pressed, False otherwise."""
        return self._buttonB
    def buttonCIsPressed(self):
        """Returns True if the A-Star button C is pressed, False otherwise."""
        return self._buttonC
    def getBatteryVolts(self):
        """Returns the robot battery level in Volts."""
        while self._lockBattery:
            # Wait while battery attribute is locked
            pass
        return self._batteryMV / 1000.
    def getOdometerPhi(self):
        """Returns the phi angle of the robot from the odometer in radians
        (0 <= phi < 2*Pi). 0 corresponds to the robot pointing in the positive x
        direction. The angle increases turning in direction of the positive y
        axis (left turn).
        """
        while self._lockOdometer:
            # Wait while odometer attributes are locked
            pass
        return self._phi
    def getOdometerXY(self):
        """Returns the x and y position of the robot from the odometer in mm."""
        while self._lockOdometer:
            # Wait while odometer attributes are locked
            pass
        return self._x, self._y
    def setYellowLED(self, value = 0):
        """Sets the A-Star yellow led status (0 = Off, 1 = On)."""
        if value == 0:
            self.ledYellow = 0
        else:
            self.ledYellow = 1
    def setGreenLED(self, value = 0):
        """Sets the A-Star green led status (0 = Off, 1 = On)."""
        if value == 0:
            self.ledGreen = 0
        else:
            self.ledGreen = 1
    def setRedLED(self, value = 0):
        """Sets the A-Star red led status (0 = Off, 1 = On)."""
        if value == 0:
            self.ledRed = 0
        else:
            self.ledRed = 1
    def setPanServo(self, us_4 = 0):
        """Sets the pan servo pulse width value in quarter-microseconds."""
        while self._lockServos:
            # Wait while servos attributes are locked
            pass
        self._panServo = us_4
    def setTiltServo(self, us_4 = 0):
        """Sets the tilt servo pulse width value in quarter-microseconds."""
        while self._lockServos:
            # Wait while servos attributes are locked
            pass
        self._tiltServo = us_4
    def setMastServo(self, us_4 = 0):
        """Sets the mast servo pulse width value in quarter-microseconds."""
        while self._lockServos:
            # Wait while servos attributes are locked
            pass
        self._mastServo = us_4
    def playNotes(self, notes):
        """Play the specified notes on the A-Star buzzer. Refer to the Pololu
        Buzzer documentation for details on how to use the buzzer."""
        self._notes = notes
    def resetOdometer(self):
        """Resets the odometer on the A-Star."""
        self._resetOdometer = True
    def setSpeeds(self, fwdSpeed = 0, turnRate = 0):
        """Sets the robot speed in mm/s and turn rate in radians/s"""
        while self._lockSpeeds:
            # Wait while speds attributes are locked
            pass
        self._fwdSpeed = fwdSpeed
        self._turnRate = turnRate
| {
"repo_name": "DrGFreeman/RasPiBot202V2",
"path": "pi/astar.py",
"copies": "1",
"size": "9666",
"license": "mit",
"hash": 7609489151109617000,
"line_mean": 35.1985018727,
"line_max": 80,
"alpha_frac": 0.5750051728,
"autogenerated": false,
"ratio": 4.0107883817427386,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0025066810457911506,
"num_lines": 267
} |
# A starter kit for the graph/matrix exercise
import collections
def adjlist(adj_list):
    """
    Read in adj list and store in form of dict mapping node
    name to list of outgoing edges. Preserve the order you find
    for the nodes.
    """
    # Exercise stub: intentionally left for the student to complete.
    # NOTE(review): the exact input format of `adj_list` is not shown in this
    # file (presumably text naming each node and its neighbors) — confirm
    # against the lab instructions before implementing.
    adj = collections.OrderedDict() # keep stuff in order read from string
    # fill in
    return adj
def adjmatrix(adj):
    """
    From an adjacency list, return the adjacency matrix with entries in {0,1}.
    The order of nodes in adj is assumed to be same as they were read in.

    adj maps node name -> list of outgoing edge targets; row/column i of the
    matrix corresponds to the i-th key of adj, and A[i][j] == 1 iff there is
    an edge from node i to node j.
    """
    n = len(adj)
    A = [[0] * n for i in range(n)]
    # Map each node name to its row/column position (insertion order).
    pos = {name: i for i, name in enumerate(adj)}
    for p in adj:
        for q in adj[p]:
            A[pos[p]][pos[q]] = 1
    return A
def nodes(adj, start_node):
    """
    Walk every node in graph described by adj list starting at start_node
    using a breadth-first search. Return a list of all nodes found (in
    any order). Include the start_node.
    """
    found = []                             # nodes in the order reached
    visited = {start_node}                 # set gives O(1) membership
    work = collections.deque([start_node]) # FIFO frontier for BFS
    while work:
        p = work.popleft()
        found.append(p)
        # adj.get(): tolerate edge targets that have no outgoing-edge entry.
        for q in adj.get(p, []):
            if q not in visited:
                visited.add(q)
                work.append(q)
    return found
def gendot(adj):
    """
    Return a string representing the graph in Graphviz DOT format
    with all p->q edges. Parameter adj is an adjacency list
    (dict mapping node name -> list of edge targets).
    """
    dot = "digraph g {\n"
    dot += " rankdir=LR;\n"
    # One edge statement per p -> q pair, in adjacency-list order.
    for p in adj:
        for q in adj[p]:
            dot += " %s -> %s;\n" % (p, q)
    dot += "}\n"
    return dot
| {
"repo_name": "chhavi21/msan501",
"path": "labs/code/graphs/graph.py",
"copies": "3",
"size": "1101",
"license": "bsd-3-clause",
"hash": -5451312958057006000,
"line_mean": 19.7735849057,
"line_max": 75,
"alpha_frac": 0.6811989101,
"autogenerated": false,
"ratio": 3.101408450704225,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5282607360804226,
"avg_score": null,
"num_lines": null
} |
#a star with color
import time
import pygame
from pygame.locals import *
from operator import itemgetter
class node(object):
    """Lightweight record for one grid square on the search frontier."""
    def __init__(self, x, y, parent, gscore):
        self.x = x              # grid column
        self.y = y              # grid row
        self.parent = parent    # node we arrived from (None for the start)
        self.gscore = gscore    # accumulated movement cost
        self.closed = False     # True once moved to the closed list
def getdircost(loc1, loc2):
    """Movement cost between adjacent squares: 14 diagonal, 10 straight."""
    dx = loc1[0] - loc2[0]
    dy = loc1[1] - loc2[1]
    return 14 if dx != 0 and dy != 0 else 10
def get_h_score(start, end):
    """Estimated path length from `start` to `end`: Manhattan distance
    scaled by 10 to match the movement costs.

    (Returning 0 here instead would turn the search into Dijkstra's.)
    """
    dx = abs(end[0] - start[0])
    dy = abs(end[1] - start[1])
    return (dx + dy) * 10
def get_points(node):
    """Endpoints for an arrow drawn from `node` toward its parent.

    Returns (child_center, tip) in pixel coordinates (10px grid squares);
    the tip sits L pixels from the child's corner toward the parent.
    Non-adjacent parents leave the tip undefined (IndexError, as before).
    """
    L = 4
    ex, ey = node.x, node.y
    dx = node.parent.x - ex
    dy = node.parent.y - ey
    end = ()
    # Only the 8 immediately adjacent parent positions produce an arrow;
    # the offset formula covers every branch of the original if-chain.
    if max(abs(dx), abs(dy)) == 1:
        end = (ex * 10 + L * dx, ey * 10 + L * dy)
    return ((ex * 10 + 5, ey * 10 + 5), (end[0] + 5, end[1] + 5))
def draw_arrow((start,end), screen):
    """Draw an arrow from `start` (child center) to `end` (tip), in white.

    Note: the tuple-unpacking parameter syntax is Python 2 only.
    """
    #each arrow starts at a child and points to its parent
    pygame.draw.line(screen, (255,255,255),(start),(end))
    # `sides` holds the two pixel offsets of the arrow-head barbs relative
    # to the tip; chosen per direction of travel.
    sides = ((0,0),(0,0))
    if start[0] > end[0] and start[1] == end[1]:#arrow left
        sides = ((2,-2),(2,2))
    if start[0] == end[0] and start[1] > end[1]:#arrow up
        sides = ((-2,2),(2,2))
    if start[0] < end[0] and start[1] == end[1]:#arrow right
        sides = ((-2,2),(-2,-2))
    if start[0] == end[0] and start[1] < end[1]:#arrow down
        sides = ((-2,-2),(2,-2))
    if start[0] > end[0] and start[1] > end[1]:#arrow up/left
        sides = ((2,0),(0,2))
    if start[0] < end[0] and start[1] > end[1]:#arrow up/right
        sides = ((-2,0),(0,2))
    if start[0] < end[0] and start[1] < end[1]:#arrow down/right
        sides = ((-2,0),(0,-2))
    if start[0] > end[0] and start[1] < end[1]:#arrow down/left
        sides = ((2,0),(0,-2))
    # Draw the two barbs of the arrow head.
    pygame.draw.line(screen, (255,255,255),(end),(end[0]+sides[0][0],
                     end[1]+sides[0][1]))
    pygame.draw.line(screen, (255,255,255),(end),(end[0]+sides[1][0],
                     end[1]+sides[1][1]))
def create_path(s, end, grid1,grid2,screen,IsBlack, isSecondLayer):
    """Creates the shortest path between s (start) and end.

    s, end           -- (x, y) tuples on the grid
    grid1, grid2     -- 2-D boolean passability grids (two maze layers);
                        isSecondLayer selects which one is searched
    screen           -- pygame surface for the animated visualization
    IsBlack          -- draw the final path yellow (second layer) vs gray
    Returns the list of (x, y) moves from just after the start to the end
    (empty if no path exists).
    """
    if isSecondLayer == False:
        grid = grid1
    else:
        grid = grid2
    # the ons list is a 2d list of node status
    # None means the node has not been checked yet
    # a node object for a value means it is on the open list
    # a False value means that it is on the closed list
    ons = [[None for y in xrange(len(grid[x]))] for x in xrange(len(grid))]
    #n is the current best node on the open list, starting with the initial node
    n = node(s[0],s[1], None,0)
    #we store the fscores of the nodes and the nodes themselves in a binary heap
    #we don't want a binary heap here because it seems to be less consistent (don't know why exactly)
    #than a simple list.
    count = 0
    openl = []
    closedList = []#list to be redrawn to green
    geth = get_h_score
    start_time = time.time()
    while (n.x, n.y) != end:
        #search adjacent nodes
        #if the node is already on the open list, then
        #and change their pointer the the current node
        #if their path from current node is shorter than their
        #previous path from previous parent
        #if the node is not on the open list and is not a wall,
        #add it to the open list
        for x in xrange(n.x -1, n.x +2):
            for y in xrange(n.y -1 , n.y + 2):
                #the checked node can't be our central node
                if (x,y) != (n.x,n.y):
                    #To ignore the diagonal nodes
                    if x == n.x or y == n.y:
                        if grid[x][y] == True:
                            #if the node is not on the closed list or open list
                            if ons[x][y] != None:
                                if ons[x][y].closed == False:
                                    #get cost of the new path made from switching parents
                                    #new_cost = getdircost((n.x,n.y),(x,y)) + n.gscore
                                    # if the path from the current node is shorter
                                    #if new_cost <= ons[x][y].gscore:
                                    #h = geth((x,y),end)
                                    #n + h
                                    #find the index of the node
                                    #to change in the open list
                                    # NOTE(review): `index` is computed but the
                                    # reparenting update is commented out, so
                                    # shorter paths through n are not adopted
                                    # here — confirm this is intentional.
                                    index = openl.index([ons[x][y].gscore,
                                                         ons[x][y]])
                            #if the node is not a wall and not on the closed list
                            #then simply add it to the open list
                            else:
                                h = geth((x,y),end)
                                #movement score gets the direction cost
                                #added to the parent's directional cost
                                g = getdircost((n.x,n.y),(x,y)) + n.gscore
                                length = len(openl)
                                ons[x][y] = node(x, y, n, g)
                                #update the ons list and the
                                #fscore list in the list
                                #openl[length][0] = g
                                #ons[x][y] = openl[length][1]
                                openl.append([g,ons[x][y]])
                                #drawing blue nodes
                                if isSecondLayer != True and grid2[x][y] == True:
                                    pygame.draw.rect(screen,(0,0,255),(x*10,y*10,10,10))
                                    #draw_arrow(get_points(ons[x][y]),screen)
                                    pygame.display.update(x*10,y*10,10,10)
                                elif isSecondLayer == True and grid1[x][y] == True:
                                    pygame.draw.rect(screen,(0,0,255),(x*10,y*10,10,10))
                                    #draw_arrow(get_points(ons[x][y]),screen)
                                    pygame.display.update(x*10,y*10,10,10)
        #if the length of the open list is zero(all nodes on closed list)
        #then return an empty path list
        #print "cells_processed = ", len(openl)
        if len(openl) == 0:
            n = None
            break
        ##############
        #openl = sorted(openl,key = itemgetter(0,1))
        #n = min(openl)
        # FIFO pop: the [gscore, node] pair inserted earliest is taken next.
        n = openl.pop(0)
        count = count + 1
        #openl.remove(n)
        n = n[1]
        ##############
        if isSecondLayer != True and grid2[n.x][n.y] == True:
            #draw some stuff
            pygame.draw.rect(screen, (255,255,255),(s[0]*10,s[1]*10,10,10))
            pygame.display.update(s[0]*10,s[1]*10,10,10)
            #drawing red nodes
            pygame.draw.rect(screen,(255,100,0),(n.x*10,n.y*10,10,10))
            #draw_arrow(get_points(n),screen)
            pygame.time.wait(20)
            pygame.display.update(n.x*10,n.y*10,10,10)
        elif isSecondLayer == True and grid1[n.x][n.y] == True:
            #draw some stuff
            pygame.draw.rect(screen, (255,255,255),(s[0]*10,s[1]*10,10,10))
            pygame.display.update(s[0]*10,s[1]*10,10,10)
            #drawing red nodes
            pygame.draw.rect(screen,(255,100,0),(n.x*10,n.y*10,10,10))
            #draw_arrow(get_points(n),screen)
            pygame.time.wait(20)
            pygame.display.update(n.x*10,n.y*10,10,10)
        #remove from the 'closed' list
        ons[n.x][n.y].closed = True
        closedList.append(n);
    #Now we have our path, we just need to trace it
    #trace the parent of every node until the beginning is reached
    openListCount = len(openl)
    closedListCount = len(closedList)
    count = count +openListCount
    # Python 2 print statement (this module is Python 2 only).
    print "processed cells", count
    moves = []
    isPathFound = False # flag used for multi layer
    if n!= None:
        while (n.x,n.y) != s:
            moves.insert(0,(n.x,n.y))
            if(IsBlack): # second layer
                pygame.draw.rect(screen,(255,255,0),(n.x*10,n.y*10,10,10))
            elif grid2[n.x][n.y] == True:
                pygame.draw.rect(screen,(200,200,200),(n.x*10,n.y*10,10,10))
            pygame.display.update(n.x*10-20,n.y*10-20,40,40)
            closedList.remove(n)
            closedListCount = len(closedList)
            pygame.time.wait(20)
            n = n.parent#trace back to the previous node
        #pygame.time.wait(10)
    end_time = time.time()
    print "total time taken : " , end_time - start_time
    # Turn back all red and blue squares back to green
    #if isSecondLayer != True:
    for i in range (0, openListCount):
        n = min(openl)
        #n = n[2]
        openl.remove(n)
        n = n[1]
        if grid2[n.x][n.y] == True and grid1[n.x][n.y] == True:
            pygame.draw.rect(screen,(100,255,100),(n.x*10,n.y*10,10,10))
            pygame.display.update(n.x*10,n.y*10,10,10)
    for i in range (0, closedListCount):
        n = min(closedList)
        closedList.remove(n)
        if grid2[n.x][n.y] == True and grid1[n.x][n.y] == True:
            pygame.draw.rect(screen,(100,255,100),(n.x*10,n.y*10,10,10))
            pygame.display.update(n.x*10,n.y*10,10,10)
    return moves
| {
"repo_name": "abidrahmank/MyRoughWork",
"path": "maze_solver/final_maze_ 1.0.2.10_demo/Leetest_1.0.2.10/LeeColor.py",
"copies": "1",
"size": "12063",
"license": "mit",
"hash": -6280024352339706000,
"line_mean": 45.0419847328,
"line_max": 128,
"alpha_frac": 0.4202105612,
"autogenerated": false,
"ratio": 3.9370104438642297,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.48572210050642295,
"avg_score": null,
"num_lines": null
} |
# as task I most to implement the other sort algorithms
class QuickSort:
    """In-place quicksort wrapper (Hoare partition scheme).

    Note on semantics (preserved for backward compatibility): despite the
    flag name, is_decrease=True produces ASCENDING order — that is what the
    original working branch did.  is_decrease=False now produces DESCENDING
    order; the original "increase" branch had inverted loop conditions
    (`while i >= j` with i starting below j) and never sorted anything.
    """

    def __init__(self, arr, is_decrease=True):
        self.arr = arr
        self.is_decrease = is_decrease

    def get_arr(self):
        """Sort self.arr in place and return it."""
        # Empty or single-element arrays are already sorted.  (The original
        # crashed on an empty array when indexing the pivot element.)
        if len(self.arr) <= 1:
            return self.arr
        if self.is_decrease:
            self.quick_sort_decrease(self.arr, 0, len(self.arr) - 1)
        else:
            self.quick_sort_increase(self.arr, 0, len(self.arr) - 1)
        return self.arr

    def quick_sort_decrease(self, arr, izq=0, der=0):
        """Sort arr[izq:der+1] ascending in place (historical name kept)."""
        i = izq
        j = der
        pivote = arr[(izq + der) // 2]
        while i <= j:
            while arr[i] < pivote:
                i += 1
            while arr[j] > pivote:
                j -= 1
            if i <= j:
                # Tuple swap instead of a temp variable.
                arr[i], arr[j] = arr[j], arr[i]
                i += 1
                j -= 1
        if izq < j:
            self.quick_sort_decrease(arr, izq, j)
        if i < der:
            self.quick_sort_decrease(arr, i, der)

    def quick_sort_increase(self, arr, izq=0, der=0):
        """Sort arr[izq:der+1] descending in place.

        Bug fix: the original's outer condition (`while i >= j`) and the
        recursion guards were inverted, so the partition loop never ran and
        the array was returned unsorted.  This is the correct mirror image
        of quick_sort_decrease with the comparisons reversed.
        """
        i = izq
        j = der
        pivote = arr[(izq + der) // 2]
        while i <= j:
            while arr[i] > pivote:
                i += 1
            while arr[j] < pivote:
                j -= 1
            if i <= j:
                arr[i], arr[j] = arr[j], arr[i]
                i += 1
                j -= 1
        if izq < j:
            self.quick_sort_increase(arr, izq, j)
        if i < der:
            self.quick_sort_increase(arr, i, der)
| {
"repo_name": "ragnarok22/encriptednetwork",
"path": "algorithm/sort.py",
"copies": "1",
"size": "1606",
"license": "mit",
"hash": -4998332663863325000,
"line_mean": 27.6785714286,
"line_max": 68,
"alpha_frac": 0.4196762142,
"autogenerated": false,
"ratio": 3.3250517598343685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42447279740343685,
"avg_score": null,
"num_lines": null
} |
"""a state engine.
"""
import pygame
from pygame.locals import *
class State:
    """Base class for a single state in the state engine.

    Arguments:
    game  -- the owning state engine
    value -- arbitrary per-state payload

    Every template method returns None, or a new State instance that the
    engine should switch to.
    """

    def __init__(self, game, value=None):
        self.game = game
        self.value = value

    def init(self):
        """Initialize; called once, the first time this state is selected."""
        return None

    def paint(self, screen):
        """Full repaint, called once after the state is selected.

        The state itself is responsible for pygame.display.flip() etc.
        """
        return None

    def repaint(self):
        """Request a full repaint of this state on the next engine loop."""
        self._paint = 1

    def update(self, screen):
        """Partial update; the state calls pygame.display.update() itself."""
        return None

    def loop(self):
        """Per-frame logic hook."""
        return None

    def event(self, e):
        """Receive a single event."""
        return None
class Quit(State):
    """A state whose sole effect is to stop the engine's run loop."""
    def init(self):
        # Game.run() spins `while not self.quit`, so this ends the engine.
        self.game.quit = 1
class Game:
    """Template Class - the state engine that drives State objects."""

    def fnc(self, f, v=None):
        """Dispatch method name *f* (with optional single argument *v*) on
        the current state.

        Returns 1 if the handler returned a new State (the engine switched
        states and the caller should restart its loop), else 0.
        """
        s = self.state
        if not hasattr(s, f):
            return 0
        f = getattr(s, f)
        # Bug fix: identity-safe None checks (`is not None`, not `!= None`).
        if v is not None:
            r = f(v)
        else:
            r = f()
        if r is not None:
            # Handler returned a new state: switch to it and force a repaint.
            self.state = r
            self.state._paint = 1
            return 1
        return 0

    def run(self, state, screen=None):
        """Run the state engine; loops until a state sets self.quit.

        Arguments:
        state  -- the initial State
        screen -- the display surface (previous value kept when omitted)
        """
        self.quit = 0
        self.state = state
        if screen is not None:
            self.screen = screen
        self.init()
        while not self.quit:
            self.loop()

    def loop(self):
        """Run one frame: init/loop, paint/update, event pump, tick."""
        s = self.state
        # First frame of a freshly selected state runs init(); later
        # frames run loop().
        if not hasattr(s, '_init') or s._init:
            s._init = 0
            if self.fnc('init'):
                return
        else:
            if self.fnc('loop'):
                return
        # A pending repaint runs paint(); otherwise update().
        if not hasattr(s, '_paint') or s._paint:
            s._paint = 0
            if self.fnc('paint', self.screen):
                return
        else:
            if self.fnc('update', self.screen):
                return
        for e in pygame.event.get():
            # Engine-level handler gets first refusal; only unconsumed
            # events are forwarded to the current state.
            if not self.event(e):
                if self.fnc('event', e):
                    return
        self.tick()
        return

    def init(self):
        """Template Method - called once at the beginning of run()."""
        return

    def tick(self):
        """Template Method - per-frame timing; default waits ~10 ms."""
        pygame.time.wait(10)

    def event(self, e):
        """Template Method - engine-level event filter.

        Return a true value if the event is captured and should not be
        passed on to the current state.
        """
        # Bug fix: compare event type codes with `==`, not `is` — they are
        # plain ints, and identity comparison on ints is unreliable.
        if e.type == QUIT:
            self.state = Quit(self)
            return 1
| {
"repo_name": "Southpaw-TACTIC/Team",
"path": "src/python/Lib/site-packages/pgu/engine.py",
"copies": "1",
"size": "3499",
"license": "epl-1.0",
"hash": -1622266453571967000,
"line_mean": 25.3082706767,
"line_max": 101,
"alpha_frac": 0.5364389826,
"autogenerated": false,
"ratio": 4.379224030037547,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5415663012637547,
"avg_score": null,
"num_lines": null
} |
""" A static class that allows the type of a file to be checked.
"""
import os
from subprocess import PIPE
from .popenwrapper import Popen
class FileType:
    """Heuristic classification of input files via the `file(1)` utility."""

    # Placeholders so pylint sees the attributes; real integer codes are
    # assigned by init() below.
    UNKNOWN = None
    ELF_EXECUTABLE = None
    ELF_OBJECT = None
    ELF_SHARED = None
    MACH_EXECUTABLE = None
    MACH_OBJECT = None
    MACH_SHARED = None
    ARCHIVE = None
    THIN_ARCHIVE = None

    # Reverse (int -> name) map, populated by init().
    revMap = {}

    @classmethod
    def getFileType(cls, fileName):
        """Return the type code of *fileName* by parsing `file` output.

        Hacky, but avoids a python-magic dependency.
        """
        proc = Popen(['file', os.path.realpath(fileName)], stdout=PIPE)
        raw = proc.communicate()[0]
        desc = raw.decode().split(' ', 1)[1]  # strip the leading file path
        # First match wins, so rule order matters (e.g. 'executable'
        # before 'shared', archives before plain object checks).
        rules = (
            (('ELF', 'executable'), 'ELF_EXECUTABLE'),
            (('Mach-O', 'executable'), 'MACH_EXECUTABLE'),
            (('ELF', 'shared'), 'ELF_SHARED'),
            (('Mach-O', 'dynamically linked shared'), 'MACH_SHARED'),
            (('current ar archive',), 'ARCHIVE'),
            (('thin archive',), 'THIN_ARCHIVE'),
            (('ELF', 'relocatable'), 'ELF_OBJECT'),
            (('Mach-O', 'object'), 'MACH_OBJECT'),
        )
        for needles, name in rules:
            if all(needle in desc for needle in needles):
                return getattr(cls, name)
        return cls.UNKNOWN

    @classmethod
    def getFileTypeString(cls, fti):
        """Return the symbolic name for file-type code *fti*."""
        return cls.revMap.get(fti, 'UNKNOWN')

    @classmethod
    def init(cls):
        """Assign sequential integer codes and build the reverse map."""
        names = ('UNKNOWN',
                 'ELF_EXECUTABLE',
                 'ELF_OBJECT',
                 'ELF_SHARED',
                 'MACH_EXECUTABLE',
                 'MACH_OBJECT',
                 'MACH_SHARED',
                 'ARCHIVE',
                 'THIN_ARCHIVE')
        for code, name in enumerate(names):
            setattr(cls, name, code)
            cls.revMap[code] = name
# Populate the FileType integer constants and reverse map at import time.
FileType.init()
| {
"repo_name": "travitch/whole-program-llvm",
"path": "wllvm/filetype.py",
"copies": "2",
"size": "2741",
"license": "mit",
"hash": -2678989733032616400,
"line_mean": 29.4555555556,
"line_max": 76,
"alpha_frac": 0.5260853703,
"autogenerated": false,
"ratio": 4.364649681528663,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0025950033929783207,
"num_lines": 90
} |
''' AST BaseNode Abstract Class
'''
from helper import *
'''Output-buffer utilities.

Module-level state: `outputString` accumulates emitted PHP source
fragments and `indentLevel` tracks the current indentation depth.
'''
# Current indentation depth (in indent units).
indentLevel = 0
# Accumulated output fragments; joined into the final source by finishOutput().
outputString = []


def indent():
    """Increase the indentation depth by one level."""
    global indentLevel
    indentLevel += 1


def outdent():
    """Decrease the indentation depth by one level."""
    global indentLevel
    indentLevel -= 1


def append(val):
    """Append a fragment (or a list of fragments) to the output buffer."""
    if isinstance(val, list):
        outputString.extend(val)
    else:
        outputString.append(val)


def finishOutput():
    """Join the buffered fragments, reset the buffer, and return the source."""
    res = ''.join(outputString)
    initOutput()
    return res


def initOutput():
    """Reset the buffer to a fresh '<?php' header and zero indentation."""
    global outputString, indentLevel
    indentLevel = 0
    outputString = ['<?php', '\n']


def popStr():
    """Discard the most recently appended fragment."""
    outputString.pop()


def popStrToLastNewLine():
    """Discard fragments back through (and including) the last newline."""
    while lastStr() != '\n':
        popStr()
    popStr()


def lastStr():
    """Return the most recently appended fragment."""
    return outputString[-1]


def indentSpaces():
    """Return the leading whitespace for the current indentation level."""
    # Bug fix: the original built this with a list comprehension over
    # xrange(), which does not exist on Python 3; plain string repetition
    # is equivalent and portable.
    # NOTE(review): the indent unit is assumed to be four spaces, matching
    # the original literal — confirm against emitted PHP style.
    return '    ' * indentLevel
''' Node classes '''
class BaseNode(object):
    """Root of the AST node hierarchy; `val` is the node's payload."""

    def __init__(self, val):
        self.val = val

    def gen(self):
        """Emit this node: string payloads go straight to the output
        buffer, node payloads generate themselves, falsy payloads emit
        nothing."""
        if not self.val:
            return
        from helper import isString
        if isString(self.val):
            append(self.val)
        elif hasattr(self.val, 'gen'):
            self.val.gen()
class WithTerminatorNode(BaseNode):
    """Base for nodes that carry a trailing terminator node (emitted by
    subclasses after the statement text, e.g. line comments/newlines)."""
    def __init__(self, val, terminator):
        super(WithTerminatorNode, self).__init__(val)
        self.terminator = terminator
class UnaryOperationNode(BaseNode):
    """Prefix operation: emits the operator (`val`), then the operand."""
    def __init__(self, op, exp):
        super(UnaryOperationNode, self).__init__(op)
        self.exp = exp
    def gen(self):
        # Operator first (BaseNode.gen emits self.val), then operand.
        super(UnaryOperationNode, self).gen()
        self.exp.gen()
class UnaryOperationWithSpaceNode(UnaryOperationNode):
def gen(self):
super(UnaryOperationWithSpaceNode, self).gen()
append(' ')
self.exp.gen()
class BinaryOperationNode(BaseNode):
    """Infix operation: emits `exp1`, the operator (`val`) with spaces,
    then `exp2`."""
    def __init__(self, exp1, op, exp2):
        super(BinaryOperationNode, self).__init__(op)
        self.exp1 = exp1
        self.exp2 = exp2
    def gen(self):
        self.exp1.gen()
        # Operator is emitted directly (not via super) so it is padded.
        append([' ', self.val, ' '])
        self.exp2.gen()
class Root(BaseNode):
def gen(self):
initOutput()
super(Root, self).gen()
return finishOutput()
class Body(BaseNode):
def __init__(self, body, val):
self.body = body
super(Body, self).__init__(val)
def gen(self):
if self.body != None:
self.body.gen()
super(Body, self).gen()
'''
indent is Line's duty
'''
class Line(BaseNode):
def gen(self):
append(indentSpaces())
super(Line, self).gen()
append('\n')
class Embeded(BaseNode):
pass
class Statement(WithTerminatorNode):
    """A statement line: emits `val`, then '; ' and its terminator.

    An empty statement (val.val == '') instead rolls the output buffer
    back to the last newline so no stray '; ' line is emitted.
    """
    def __init__(self, val, terminator):
        super(Statement, self).__init__(val, terminator)
    def gen(self):
        super(Statement, self).gen()
        if not self.val.val=='':
            append('; ')
            self.terminator.gen()
        else:
            # Empty statement: discard everything emitted on this line.
            popStrToLastNewLine()
class LambdaAssignStatement(BaseNode):
def __init__(self, val, lambda_):
super(LambdaAssignStatement, self).__init__(val)
self.lambda_ = lambda_
def gen(self):
super(LambdaAssignStatement, self).gen()
append(' = ')
self.lambda_.gen()
class StatementWithoutTerminator(BaseNode):
pass
class JustStrStatementWithTerminator(WithTerminatorNode):
def gen(self):
if not self.val=='':
append('; ')
self.terminator.gen()
else:
popStrToLastNewLine()
class CodeBlock(BaseNode):
pass
class Expression(BaseNode):
def gen(self):
if isinstance(self.val, Expression):
append('(')
super(Expression, self).gen()
append(')')
else:
super(Expression, self).gen()
class Block(BaseNode):
def gen(self):
indent()
super(Block, self).gen()
outdent()
class InitModifier(BaseNode):
pass
class AssignRightSide(BaseNode):
def __init__(self, assign, exp):
super(AssignRightSide, self).__init__(assign)
self.exp = exp
def gen(self):
append(' ')
super(AssignRightSide, self).gen()
append(' ')
self.exp.gen()
class Value(BaseNode):
pass
class Literal(BaseNode):
pass
class SimpleLiteral(BaseNode):
pass
class ArrayLiteral(BaseNode):
def gen(self):
append('[')
super(ArrayLiteral, self).gen()
append(']')
class CommaList(BaseNode):
def __init__(self, list_, val):
super(CommaList, self).__init__(val)
self.list_ = list_
def gen(self):
if self.list_ != None:
self.list_.gen()
append(', ')
super(CommaList, self).gen()
class ArrayLiteralContentList(CommaList):
pass
class ArrayLiteralContent(BaseNode):
def __init__(self, key, val):
self.key = key
super(ArrayLiteralContent, self).__init__(val)
def gen(self):
if self.key != None:
self.key.gen()
append(' => ')
super(ArrayLiteralContent, self).gen()
class Varible(BaseNode):
def __init__(self, nsContentName, val):
self.nsContentName = nsContentName
super(Varible, self).__init__(val)
def gen(self):
if self.nsContentName:
self.nsContentName.gen()
append('::')
if isinstance(self.val, NsContentName):
self.val = self.val.val
if not self.val.isupper():
append('$')
super(Varible, self).gen()
class Assignable(BaseNode):
def __init__(self, val, exp, id_):
super(Assignable, self).__init__(val)
self.exp = exp
self.id_ = id_
def gen(self):
super(Assignable, self).gen()
if self.exp != None and self.id_ == None:
append('[')
self.exp.gen()
append(']')
elif self.exp == None and self.id_ != None:
append(['->', self.id_])
class Assign(BaseNode):
def __init__(self, val, rightSide):
super(Assign, self).__init__(val)
self.rightSide = rightSide
def gen(self):
super(Assign, self).gen()
self.rightSide.gen()
class ArgList(CommaList):
pass
class Arg(BaseNode):
pass
class ParamList(CommaList):
pass
class Param(BaseNode):
def __init__(self, ref, val, init):
self.ref = ref
super(Param, self).__init__(val)
self.init = init
def gen(self):
self.ref.gen()
append('$')
super(Param, self).gen()
self.init.gen()
class Call(BaseNode):
def __init__(self, val, args):
super(Call, self).__init__(val)
self.args = args
def gen(self):
super(Call, self).gen()
last = lastStr()
if last == 'echo':
append(' ')
else:
append('(')
self.args.gen()
if not last == 'echo':
append(')')
class Callable(BaseNode):
pass
class Lambda(WithTerminatorNode):
def __init__(self, paramList, use, terminator, block):
super(Lambda, self).__init__(paramList, terminator)
self.use = use
self.block = block
def gen(self):
append('function (')
super(Lambda, self).gen()
append(') ')
self.use.gen()
append('{ ')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class UseModifier(BaseNode):
def __init__(self, paramList):
super(UseModifier, self).__init__(paramList)
def gen(self):
if not self.val:
return
append('use (')
super(UseModifier, self).gen()
append(')')
class Terminator(BaseNode):
pass
class Namespace(BaseNode):
def gen(self):
append('namespace ')
super(Namespace, self).gen()
class UseNamespace(BaseNode):
def gen(self):
append('use ')
super(UseNamespace, self).gen()
class NsContentName(BaseNode):
def __init__(self, list_, val):
super(NsContentName, self).__init__(val)
self.list_ = list_
def gen(self):
self.list_ and self.list_.gen()
super(NsContentName, self).gen()
class NsContentNameList(CommaList):
pass
class NsContentNameAsId(BaseNode):
def __init__(self, val, id_):
super(NsContentNameAsId, self).__init__(val)
self.id_ = id_
def gen(self):
super(NsContentNameAsId, self).gen()
append(['as ', self.id_])
class NsContentNameAsIdList(CommaList):
pass
class If(WithTerminatorNode):
def __init__(self, val, elseBlock, terminator):
super(If, self).__init__(val, terminator)
self.elseBlock = elseBlock
def gen(self):
super(If, self).gen()
if self.elseBlock:
append(' else {')
self.terminator.gen()
append('\n')
self.elseBlock.gen()
append([indentSpaces(), '}'])
class IfBlock(WithTerminatorNode):
def __init__(self, list_, exp, terminator, block):
super(IfBlock, self).__init__(exp, terminator)
self.list_ = list_
self.block = block
def gen(self):
if self.list_ != None:
self.list_.gen()
append(' else if (')
else:
append('if (')
super(IfBlock, self).gen()
append(') {')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class Switch(WithTerminatorNode):
def __init__(self, exp, terminator, content):
super(Switch, self).__init__(exp, terminator)
self.content = content
def gen(self):
append('switch (')
super(Switch, self).gen()
append(') { ')
self.terminator.gen()
append('\n')
self.content.gen()
append([indentSpaces(), '}'])
class SwitchContent(Block):
pass
class InSwitchDefList(Body):
pass
class InSwitchDef(Line):
pass
class ValueList(BaseNode):
def __init__(self, list_, value):
super(ValueList, self).__init__(value)
self.list_ = list_
class Case(WithTerminatorNode):
    """One `case`/`default` arm of a switch statement.

    `val` is the keyword ('case', or anything else for the default arm),
    `valueList` a linked ValueList of case labels, `block` the arm body.
    """
    def __init__(self, case, valueList, terminator, block):
        super(Case, self).__init__(case, terminator)
        self.valueList = valueList
        self.block = block
    def gen(self):
        if self.val == 'case':
            # The ValueList linked list is built last-first; walk it into
            # a Python list and reverse to restore source order.
            valueList = []
            while(self.valueList):
                valueList.append(self.valueList.val)
                self.valueList = self.valueList.list_
            valueList.reverse()
            # Drop the fragment emitted just before us — presumably the
            # enclosing Line's indent — TODO confirm against Line.gen().
            popStr()
            for value in valueList:
                # One 'case X : ' line per label.
                append([indentSpaces(), 'case '])
                value.gen()
                append([' : ', '\n'])
            # Remove the trailing newline so the terminator follows the
            # last label directly.
            popStr()
            self.terminator.gen()
            append('\n')
            self.block.gen()
            # 'break;' sits one level deeper than the case labels.
            indent()
            append([indentSpaces(), 'break; ', '\n'])
            outdent()
        else:
            append('default : ')
            self.terminator.gen()
            append('\n')
            self.block.gen()
            # NOTE(review): drops the block's last fragment — presumably a
            # trailing newline; confirm.
            popStr()
class For(WithTerminatorNode):
def __init__(self, id1Ref, id1, id2Ref, id2, exp, terminator, block):
super(For, self).__init__(exp, terminator)
self.id1Ref = id1Ref
self.id1 = id1
self.id2Ref = id2Ref
self.id2 = id2
self.block = block
def gen(self):
append('foreach (')
super(For, self).gen()
append(' as ')
self.id1Ref.gen()
append(['$', self.id1])
if self.id2:
append(' => ')
self.id2Ref.gen()
append(['$', self.id2])
append(') { ')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class While(WithTerminatorNode):
def __init__(self, exp, terminator, block):
super(While, self).__init__(exp, terminator)
self.block = block
def gen(self):
append('while (')
super(While, self).gen()
append(') { ')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class DoWhile(WithTerminatorNode):
def __init__(self, term1, block, cmtOrEptList, exp, term2):
super(DoWhile, self).__init__(exp, term1)
self.block = block
self.term2 = term2
self.cmtOrEptList = cmtOrEptList
def gen(self):
append('do { ')
self.terminator.gen()
append('\n')
self.block.gen()
indent()
self.cmtOrEptList.gen()
outdent()
append([indentSpaces(), '} while('])
super(DoWhile, self).gen()
append('); ')
self.term2.gen()
class CommentOrEmptyLineList(Body):
pass
class CommentOrEmptyLine(Line):
pass
class Try(WithTerminatorNode):
def __init__(self, tryTerm, tryBlock, catch, finTerm, finBlock):
super(Try, self).__init__(tryBlock, tryTerm)
self.catch = catch
self.finTerm = finTerm
self.finBlock = finBlock
def gen(self):
append('try { ')
self.terminator.gen()
append('\n')
super(Try, self).gen()
append([indentSpaces(), '} '])
self.catch.gen()
if self.finTerm:
append('finally { ')
self.finTerm.gen()
append('\n')
self.finBlock.gen()
append([indentSpaces(), '}'])
class Catch(WithTerminatorNode):
def __init__(self, catch, className, var, terminator, block):
super(Catch, self).__init__(var, terminator)
self.catch = catch
self.className = className
self.block = block
def gen(self):
if self.catch:
self.catch.gen()
append('catch (')
self.className.gen()
append(' ')
super(Catch, self).gen()
append(') { ')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '} '])
class Class(WithTerminatorNode):
def __init__(self, id_, extends, implements, terminator, content):
super(Class, self).__init__(id_, terminator)
self.extends = extends
self.implements = implements
self.content = content
def gen(self):
append(['class ', self.val])
self.extends.gen()
self.implements.gen()
append(' {')
self.terminator.gen()
append('\n')
self.content.gen()
append([indentSpaces(), '}'])
class ClassContent(Block):
pass
class InClassDefList(Body):
pass
class InClassDef(Line):
pass
class Interface(WithTerminatorNode):
def __init__(self, id_, extends, terminator, content):
super(Interface, self).__init__(id_, terminator)
self.extends = extends
self.terminator = terminator
self.content = content
def gen(self):
append(['interface ', self.val])
self.extends.gen()
append(' {')
self.terminator.gen()
append('\n')
self.content.gen()
append([indentSpaces(), '}'])
class InterfaceContent(Block):
pass
class InterfaceDefList(Body):
pass
class InterfaceDef(Line):
pass
class ExtendsModifier(BaseNode):
def gen(self):
if not self.val:
return
append(' extends ')
super(ExtendsModifier, self).gen()
class ImplementsModifier(BaseNode):
def gen(self):
if not self.val:
return
append(' implements ')
super(ImplementsModifier, self).gen()
class JustStrModifier(BaseNode):
def gen(self):
super(JustStrModifier, self).gen()
self.val and append(' ')
class AccessModifier(JustStrModifier):
pass
class StaticModifier(JustStrModifier):
pass
class RefModifier(BaseNode):
pass
class MemberFuncDecWithoutTerminator(BaseNode):
def __init__(self, access, static, ref, id_, paramList):
super(MemberFuncDecWithoutTerminator, self).__init__(id_)
self.access = access
self.static = static
self.ref = ref
self.paramList = paramList
def gen(self):
self.access.gen()
self.static.gen()
self.ref.gen()
append('function ')
super(MemberFuncDecWithoutTerminator, self).gen()
append('(')
self.paramList.gen()
append(')')
class MemberFuncDec(WithTerminatorNode):
def gen(self):
super(MemberFuncDec, self).gen()
append('; ')
self.terminator.gen()
class MemberFuncDef(WithTerminatorNode):
def __init__(self, val, terminator, block):
super(MemberFuncDef, self).__init__(val, terminator)
self.block = block
def gen(self):
super(MemberFuncDef, self).gen()
append(' {')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class DataMemberDef(WithTerminatorNode):
def __init__(self, access, static, id_, init, terminator):
super(DataMemberDef, self).__init__(id_, terminator)
self.access = access
self.static = static
self.init = init
def gen(self):
self.access.gen()
self.static.gen()
append('$')
super(DataMemberDef, self).gen()
self.init.gen()
append('; ')
self.terminator.gen()
class FuncDef(WithTerminatorNode):
def __init__(self,init, id_, paramList, terminator, block):
self.init = init
super(FuncDef, self).__init__(id_, terminator)
self.paramList = paramList
self.block = block
def gen(self):
append('function ')
self.init.gen()
super(FuncDef, self).gen()
append('(')
self.paramList.gen()
append(') {')
self.terminator.gen()
append('\n')
self.block.gen()
append([indentSpaces(), '}'])
class ConstDefWithoutTerminator(BaseNode):
def __init__(self, id_, assignRightSide):
super(ConstDefWithoutTerminator, self).__init__(id_)
self.assignRightSide = assignRightSide
def gen(self):
append('const ')
super(ConstDefWithoutTerminator, self).gen()
self.assignRightSide.gen()
class ConstDef(WithTerminatorNode):
def gen(self):
super(ConstDef, self).gen()
append('; ')
self.terminator.gen()
class Return(BaseNode):
def gen(self):
append('return')
self.val and append(' ')
super(Return, self).gen()
class Throw(BaseNode):
def gen(self):
append('throw ')
super(Throw, self).gen()
class Yield(BaseNode):
def __init__(self, exp1, exp2):
super(Yield, self).__init__(exp1)
self.exp2 = exp2
def gen(self):
append('yield ')
super(Yield, self).gen()
if self.exp2:
append(' => ')
self.exp2.gen()
class GlobalDec(BaseNode):
def gen(self):
append('global ')
super(GlobalDec, self).gen()
class GlobalVaribleList(CommaList):
    """Comma-separated variable list after `global`; prefixes each name
    with the '$' sigil.

    Deliberately bypasses CommaList.gen() — `super(CommaList, self)`
    resolves to BaseNode.gen — because the comma handling is
    re-implemented here so '$' can be inserted before each element.
    """
    def gen(self):
        if self.list_ != None:
            self.list_.gen()
            append(', ')
        append('$')
        super(CommaList, self).gen()
class Operation(BaseNode):
pass
class UMath(UnaryOperationNode):
pass
class BMath(BinaryOperationNode):
pass
class Cast(UnaryOperationNode):
pass
class InDecrement(UnaryOperationNode):
def __init__(self, op, exp, back):
super(InDecrement, self).__init__(op, exp)
self.back = back
def gen(self):
if self.back == True:
self.exp.gen()
append(self.val)
else:
super(InDecrement, self).gen()
class UBit(UnaryOperationNode):
pass
class BBit(UnaryOperationNode):
pass
class InstanceOf(BinaryOperationNode):
pass
class ULogic(UnaryOperationNode):
pass
class BLogic(BinaryOperationNode):
pass
class NewOrClone(BaseNode):
def __init__(self, newOrClone, nsContentName, argList, varible):
super(NewOrClone, self).__init__(nsContentName)
self.argList = argList
self.varible = varible
self.newOrClone = newOrClone
def gen(self):
append(self.newOrClone)
append(' ')
if self.argList:
super(NewOrClone, self).gen()
append('(')
self.argList.gen()
append(')')
else:
self.varible.gen()
class Compare(BinaryOperationNode):
pass
class Ternary(BaseNode):
def __init__(self, exp1, exp2, exp3):
super(Ternary, self).__init__(exp1)
self.exp2 = exp2
self.exp3 = exp3
def gen(self):
super(Ternary, self).gen()
append(' ? ')
self.exp2.gen()
append(' : ')
self.exp3.gen()
class At(UnaryOperationNode):
pass
class Ref(UnaryOperationNode):
pass
if __name__ == '__main__':
pass
# b = BaseNode('str')
# print b.isinstance(basestring)
# print Body.indentLevel
| {
"repo_name": "zhengkaifu/PingPHP",
"path": "src/nodes.py",
"copies": "1",
"size": "21197",
"license": "mit",
"hash": -5506492932397695000,
"line_mean": 21.6463675214,
"line_max": 73,
"alpha_frac": 0.5554559607,
"autogenerated": false,
"ratio": 3.849800217944061,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49052561786440607,
"avg_score": null,
"num_lines": null
} |
"""AST classes pending implementation."""
from scoff.ast import ScoffASTObject
class AXIDescription(ScoffASTObject):
"""AXIDescription AST."""
__slots__ = ("static_declarations", "params", "statements")
def __init__(
self,
parent=None,
static_declarations=None,
params=None,
statements=None,
**kwargs
):
"""Initialize."""
super().__init__(
parent=parent,
static_declarations=static_declarations,
params=params,
statements=statements,
**kwargs
)
class StaticStatement(ScoffASTObject):
"""StaticStatement AST."""
__slots__ = ("var", "value")
def __init__(self, parent, var, value, **kwargs):
"""Initialize."""
super().__init__(parent=parent, var=var, value=value, **kwargs)
class ParameterStatement(ScoffASTObject):
"""ParameterStatement AST."""
__slots__ = ("name", "value")
def __init__(self, parent, name, value, **kwargs):
"""Initialize."""
super().__init__(parent=parent, name=name, value=value, **kwargs)
class FnCall(ScoffASTObject):
"""FnCall AST."""
__slots__ = ("fn", "args")
def __init__(self, parent, fn, args=None, **kwargs):
"""Initialize."""
super().__init__(parent=parent, fn=fn, args=args, **kwargs)
class SlaveRegister(ScoffASTObject):
"""SlaveRegister AST."""
__slots__ = ("name", "address", "properties")
def __init__(self, parent, name, address, properties=None, **kwargs):
"""Initialize."""
super().__init__(
parent=parent,
name=name,
address=address,
properties=properties,
**kwargs
)
class TemplatedNameSubstFragment(ScoffASTObject):
"""TemplatedNameSubstFragment AST."""
__slots__ = ("fragment", "templates")
def __init__(self, parent, fragment, templates=None, **kwargs):
"""Initialize."""
super().__init__(
parent=parent, fragment=fragment, templates=templates, **kwargs
)
class TemplatedNameSubstFmt(ScoffASTObject):
"""TemplatedNameSubstFmt AST."""
__slots__ = ("arg",)
def __init__(self, parent, arg, **kwargs):
"""Initialize."""
super().__init__(parent=parent, arg=arg, **kwargs)
class TemplatedNameSubst(ScoffASTObject):
"""TemplatedNameSubstFmt AST."""
__slots__ = ("fragments",)
def __init__(self, parent, fragments, **kwargs):
"""Initialize."""
super().__init__(parent=parent, fragments=fragments, **kwargs)
class SlaveRegisterField(ScoffASTObject):
    """SlaveRegisterField AST.

    Bug fix: ``__slots__`` previously listed ``position``, ``access`` and
    ``default`` twice each; the duplicate entries were redundant (each one
    creates an extra slot descriptor) and are removed.
    """

    __slots__ = (
        "source",
        "position",
        "access",
        "default",
        "properties",
    )

    def __init__(
        self,
        parent,
        source,
        position=None,
        access=None,
        default=None,
        properties=None,
        **kwargs
    ):
        """Initialize."""
        super().__init__(
            parent=parent,
            source=source,
            position=position,
            access=access,
            default=default,
            properties=properties,
            **kwargs
        )
class SlaveOutput(ScoffASTObject):
"""SlaveOutput AST."""
__slots__ = ("desc",)
def __init__(self, parent, desc, **kwargs):
"""Initialize."""
super().__init__(parent=parent, desc=desc, **kwargs)
class SlaveInput(ScoffASTObject):
"""SlaveInput AST."""
__slots__ = ("desc",)
def __init__(self, parent, desc, **kwargs):
"""Initialize."""
super().__init__(parent=parent, desc=desc, **kwargs)
class OutputDescriptor(ScoffASTObject):
"""OutputDescriptor AST."""
__slots__ = ("name", "sig")
def __init__(self, parent, name, sig, **kwargs):
"""Initialize."""
super().__init__(parent=parent, name=name, sig=sig, **kwargs)
class InputDescriptor(ScoffASTObject):
"""InputDescriptor AST."""
__slots__ = ("name", "sig")
def __init__(self, parent, name, sig, **kwargs):
"""Initialize."""
super().__init__(parent=parent, name=name, sig=sig, **kwargs)
class SignalSource(ScoffASTObject):
"""SignalSource AST."""
__slots__ = ("dest",)
def __init__(self, parent, dest, **kwargs):
"""Initialize."""
super().__init__(parent=parent, dest=dest, **kwargs)
class SignalDestination(ScoffASTObject):
"""SignalDestination AST."""
__slots__ = ("dest",)
def __init__(self, parent, dest, **kwargs):
"""Initialize."""
super().__init__(parent=parent, dest=dest, **kwargs)
class SourceBitAccessor(ScoffASTObject):
"""SourceBitAccessor AST."""
__slots__ = ("register", "bit")
def __init__(self, parent, register, bit, **kwargs):
"""Initialize."""
super().__init__(parent=parent, register=register, bit=bit, **kwargs)
class FieldBitAccessor(ScoffASTObject):
"""FieldBitAccessor AST."""
__slots__ = ("register", "bit")
def __init__(self, parent, register, bit, **kwargs):
"""Initialize."""
super().__init__(parent=parent, register=register, bit=bit, **kwargs)
class GenerateStatement(ScoffASTObject):
    """GenerateStatement AST."""
    # NOTE(review): parameter `range` shadows the builtin; kept as-is for
    # interface compatibility with existing callers.
    __slots__ = ("var", "range", "gen_scope")
    def __init__(self, parent, var, range, gen_scope, **kwargs):
        """Initialize."""
        super().__init__(
            parent=parent, var=var, range=range, gen_scope=gen_scope, **kwargs
        )
class Range(ScoffASTObject):
"""Range AST."""
__slots__ = ("left", "right")
def __init__(self, parent, left, right, **kwargs):
"""Initialize."""
super().__init__(parent=parent, left=left, right=right, **kwargs)
class RegisterProperty(ScoffASTObject):
"""RegisterProperty AST."""
__slots__ = ("name", "value")
def __init__(self, parent, name, value, **kwargs):
"""Initialize."""
super().__init__(parent=parent, name=name, value=value, **kwargs)
class PositiveIntegerValue(ScoffASTObject):
"""PositiveIntegerValue AST."""
__slots__ = ("hex", "posint")
def __init__(self, parent, hex, posint, **kwargs):
"""Initialize."""
super().__init__(parent=parent, hex=hex, posint=posint, **kwargs)
class BitField(ScoffASTObject):
"""BitField AST."""
__slots__ = ("left", "right")
def __init__(self, parent, left, right=None, **kwargs):
"""Initialize."""
super().__init__(parent=parent, left=left, right=right, **kwargs)
MMAP_AST_CLASSES = (
AXIDescription,
StaticStatement,
ParameterStatement,
FnCall,
SlaveRegister,
TemplatedNameSubstFragment,
TemplatedNameSubstFmt,
TemplatedNameSubst,
SlaveRegisterField,
SlaveOutput,
SlaveInput,
OutputDescriptor,
InputDescriptor,
SignalSource,
SignalDestination,
SourceBitAccessor,
FieldBitAccessor,
GenerateStatement,
Range,
RegisterProperty,
PositiveIntegerValue,
BitField,
)
| {
"repo_name": "brunosmmm/hdltools",
"path": "hdltools/mmap/ast.py",
"copies": "1",
"size": "7133",
"license": "mit",
"hash": 6593967054202045000,
"line_mean": 22.9362416107,
"line_max": 78,
"alpha_frac": 0.5666619936,
"autogenerated": false,
"ratio": 3.9561841375485303,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5022846131148531,
"avg_score": null,
"num_lines": null
} |
# A stereo "White Noise" generator using STANDARD Python 2.5.2 or higher.
# This is for (PC)Linux(OS), (ONLY?), and was done purely for fun.
#
# It is another simple piece of testgear for the amateur electronics
# enthusiast and uses /dev/dsp instead of /dev/audio. Enjoy... ;o)
#
# (Original copyright, (C)2010, B.Walker, G0LCU.)
#
# DONATED TO LXF AS PUBLIC DOMAIN...
#
# Ensure the sound is enabled and the volume is turned up.
#
# Copy the file to the Lib folder/drawer/directory where Python resides,
# or where the modules reside, as "noise.py" without the quotes.
#
# Start the Python interpreter from a console/terminal window.
#
# For a quick way to run the noise generator just use at the ">>>" prompt:-
#
# >>> import noise[RETURN/ENTER]
#
# And away we go...
#
# This code is now Public Domain and you may do with it as you please...
#
# Coded on a(n) HP dual core notebook running PCLinuxOS 2009 and
# Python 2.5.2 for Linux; also tested on Knoppix 5.1.1 and Python 2.5.2
# and Debian 6.0.0 and Python 2.6.6...
#
# Connect an oscilloscope to the earphone socket(s) to see the noise
# waveform(s) being generated.
# Import any modules...
import os
import random
# Clear a terminal window ready to run this program.
print os.system("clear"),chr(13)," ",chr(13),
# The program proper...
def main():
    # Interactive loop: prompt for an amplitude (1-64), then play a
    # roughly 10-second white-noise burst to /dev/dsp.
    # Python 2 / OSS-audio Linux only (print statements, raw_input, file()).
    # Make all variables global, a quirk of mine... :)
    global noise
    global value
    global select
    global count
    global amplitudestring
    global amplitude
    # The INITIAL default values.
    select="G0LCU."
    value=0
    noise=chr(value)
    count=0
    amplitudestring="64"
    amplitude=64
    # A continuous loop to re-generate noise as required...
    while 1:
        # Set up a basic user window (clear screen, carriage returns).
        print os.system("clear"),chr(13)," ",chr(13),
        print
        print "Simple Noise Generator using STANDARD Python 2.5.2"
        print "for PCLinuxOS 2009, issued as Public Domain to LXF."
        print
        print "(Original copyright, (C)2010, B.Walker, G0LCU.)"
        print
        # Set amplitude level from 1 to 64 inclusive.
        amplitudestring=raw_input("Enter amplitude level, 1 to 64:- ")
        # Don't allow any typo error at all within limits...
        # On any typo error fall back to maximum amplitude.
        if amplitudestring=="": amplitudestring="64"
        if amplitudestring.isdigit()==0: amplitudestring="64"
        if len(amplitudestring)>=3: amplitudestring="64"
        # Now allocate the numerical value once the error checking has been done.
        amplitude=int(amplitudestring)
        if amplitude<=1: amplitude=1
        if amplitude>=64: amplitude=64
        print
        # Select RETURN/ENTER for "White Noise", OR, any other key then RETURN/ENTER to Quit.
        select=raw_input("Press RETURN/ENTER for noise or any other key then RETURN/ENTER to Quit:- ")
        if select!="": break
        print os.system("clear"),chr(13)," ",chr(13),
        print
        print "A 10 second white noise audio burst..."
        print
        print "Amplitude level",amplitude,"\b..."
        print
        # Change the random seed value per run.
        random.seed(None)
        # Open up the audio channel(s) to write directly to.
        # Note this DEMO uses /dev/dsp and NOT /dev/audio... ;o)
        audio=file('/dev/dsp','wb')
        # A count of 70000 is about 10 seconds of noise burst...
        count=0
        while count<70000:
            # Generate a random byte value in [0, amplitude).
            value=random.random()*amplitude
            noise=chr(int(value))
            # Write the character, (byte), "value" to the audio device.
            audio.write(noise)
            count=count+1
        # Close the audio device when finished.
        audio.close()
main()
# End of demo...
# Enjoy finding simple solutions to often very difficult problems...
| {
"repo_name": "ActiveState/code",
"path": "recipes/Python/577604_Simple_White_Noise_Generator_Using_Standard/recipe-577604.py",
"copies": "1",
"size": "3510",
"license": "mit",
"hash": 7276491850625021000,
"line_mean": 31.5,
"line_max": 96,
"alpha_frac": 0.7065527066,
"autogenerated": false,
"ratio": 3.2054794520547945,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44120321586547945,
"avg_score": null,
"num_lines": null
} |
"""asteria URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from announcements.views import (
AnnouncementListView
)
from django.conf.urls import (
url,
include,
)
from django.contrib import (
admin
)
import challenges.views \
as challenge_views
import teams.views \
as team_views
# URL routing table: auth, admin, announcements, then challenge- and
# team-related views.  Route names are referenced by templates/reverse().
urlpatterns = [
    url('', include('django.contrib.auth.urls')),
    url(r'^$', AnnouncementListView.as_view(), name='announcements'),
    url(r'^admin/', admin.site.urls),
    # Challenge app routes.
    url(r'^categories/$', challenge_views.CategoryListView.as_view(), name='categories'),
    url(r'^category/(?P<slug>[-\w]+)/$', challenge_views.CategoryDetailView.as_view(), name='category'),
    url(r'^challenge/(?P<slug>[-\w]+)/$', challenge_views.ChallengeDetailView.as_view(), name='challenge'),
    url(r'^challenges/$', challenge_views.ChallengeListView.as_view(), name='challenges'),
    url(r'^level/(?P<pk>\d{1,32})/$', challenge_views.LevelDetailView.as_view(), name='level'),
    url(r'^levels/$', challenge_views.LevelListView.as_view(), name='levels'),
    url(r'^reveal_hint/$', challenge_views.reveal_hint, name='reveal_hint'),
    url(r'^submit_flag/$', challenge_views.submit_flag, name='submit_flag'),
    # Team app routes.
    url(r'^appoint_captain/$', team_views.appoint_captain, name='appoint_captain'),
    url(r'^change_team_name/$', team_views.change_team_name, name='change_team_name'),
    url(r'^change_team_password/$', team_views.change_team_password, name='change_team_password'),
    url(r'^join_team/$', team_views.join_team, name='join_team'),
    url(r'^player/(?P<slug>[-\w]+)$', team_views.PlayerView.as_view(), name='player'),
    url(r'^promote_demote/$', team_views.promote_demote, name='promote_demote'),
    url(r'^register/$', team_views.register, name='register'),
    url(r'^scoreboard/$', team_views.ScoreboardView.as_view(), name='scoreboard'),
    url(r'^team/(?P<slug>[-\w]+)$', team_views.TeamView.as_view(), name='team'),
]
| {
"repo_name": "elespike/Asteria",
"path": "asteria/urls.py",
"copies": "1",
"size": "2896",
"license": "mit",
"hash": 1198885713320745200,
"line_mean": 47.2666666667,
"line_max": 109,
"alpha_frac": 0.5856353591,
"autogenerated": false,
"ratio": 3.4517282479141835,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4537363607014184,
"avg_score": null,
"num_lines": null
} |
"""Asterisk FastAGI server for use from the dialplan
You use an asterisk FastAGI like this from extensions.conf:
exten => 1000,3,AGI(agi://127.0.0.1:4573,arg1,arg2)
Where 127.0.0.1 is the server and 4573 is the port on which
the server is listening.
Module defines a standard Python logging module log 'FastAGI'
"""
from twisted.internet import protocol, reactor, defer
from twisted.internet import error as tw_error
from twisted.protocols import basic
import socket, logging, time
from starpy import error
log = logging.getLogger( 'FastAGI' )
FAILURE_CODE = -1
class FastAGIProtocol(basic.LineOnlyReceiver):
"""Protocol for the interfacing with the Asterisk FastAGI application
Attributes:
variables -- for connected protocol, the set of variables passed
during initialisation, keys are all-lower-case, set of variables
returned for an Asterisk 1.2.1 installation on Gentoo on a locally
connected channel:
agi_network = 'yes'
agi_request = 'agi://localhost'
agi_channel = 'SIP/mike-ccca'
agi_language = 'en'
agi_type = 'SIP'
agi_uniqueid = '1139871605.0'
agi_callerid = 'mike'
agi_calleridname = 'Mike Fletcher'
agi_callingpres = '0'
agi_callingani2 = '0'
agi_callington = '0'
agi_callingtns = '0'
agi_dnid = '1'
agi_rdnis = 'unknown'
agi_context = 'testing'
agi_extension = '1'
agi_priority = '1'
agi_enhanced = '0.0'
agi_accountcode = ''
# Internal:
readingVariables -- whether the instance is still in initialising by
reading the setup variables from the connection
messageCache -- stores incoming variables
pendingMessages -- set of outstanding messages for which we expect
replies
lostConnectionDeferred -- deferred firing when the connection is lost
delimiter -- uses bald newline instead of carriage-return-newline
XXX Lots of problems with data-escaping, no docs on how to escape special
characters that I can see...
"""
    # Class-level defaults for per-connection state:
    readingVariables = False  # True while reading the initial agi_* variable lines
    lostConnectionDeferred = None  # deferred returned by onClose(), if requested
    delimiter = '\n'  # FastAGI uses a bare newline instead of CRLF
    def __init__( self, *args, **named ):
        """Initialise the FastAGIProtocol, arguments are ignored"""
        self.messageCache = []  # stores incoming message fragments
        self.variables = {}  # agi_* variables passed during initialisation
        self.pendingMessages = []  # deferreds awaiting command replies, oldest first
def connectionMade( self ):
"""(Internal) Handle incoming connection (new AGI request)
Initiates read of the initial attributes passed by the server
"""
log.info( "New Connection" )
self.readingVariables = True
    def connectionLost( self, reason ):
        """(Internal) Handle loss of the connection (remote hangup)

        Fails every still-pending command deferred with ConnectionDone,
        then (always) fires the onClose deferred with *reason* and clears
        the pending queue.
        """
        log.info( """Connection terminated""" )
        try:
            for df in self.pendingMessages:
                df.errback( tw_error.ConnectionDone( """FastAGI connection terminated""") )
        finally:
            # Run even if an errback above raised: notify onClose watchers
            # and drop any remaining queue entries.
            if self.lostConnectionDeferred:
                self.lostConnectionDeferred.errback( reason )
            del self.pendingMessages[:]
def onClose( self ):
"""Return a deferred which will fire when the connection is lost"""
if not self.lostConnectionDeferred:
self.lostConnectionDeferred = defer.Deferred()
return self.lostConnectionDeferred
    def lineReceived(self, line):
        """(Internal) Handle Twisted's report of an incoming line from the manager

        While readingVariables is set, lines are "key: value" pairs that
        populate self.variables; a blank line ends that phase and hands the
        connection to the factory's mainFunction.  Afterwards each line is
        matched to the oldest pending command deferred.
        """
        log.debug( 'Line In: %r', line )
        if self.readingVariables:
            if not line.strip():
                # Blank line terminates the variable block; start the app.
                self.readingVariables = False
                self.factory.mainFunction( self )
            else:
                try:
                    key,value = line.split( ':', 1 )
                    value = value[1:].rstrip( '\n' ).rstrip( '\r' )
                except ValueError, err:
                    log.error( """Invalid variable line: %r""", line )
                else:
                    # Keys stored all-lowercase; values keep their case.
                    self.variables[ key.lower() ] = value
                    log.info( """%s = %r""", key,value )
        else:
            try:
                # Replies arrive in command order: oldest deferred first.
                df = self.pendingMessages.pop(0)
            except IndexError, err:
                log.warn( """Line received without pending deferred: %r""", line )
            else:
                if line.startswith( '200' ):
                    # Success: strip "200 " prefix and optional "result=".
                    line = line[4:]
                    if line.lower().startswith( 'result=' ):
                        line = line[7:]
                    df.callback( line )
                else:
                    # XXX parse out the error code
                    try:
                        errCode, line = line.split( ' ', 1 )
                        errCode = int( errCode )
                    except ValueError,err:
                        errCode = 500
                    df.errback( error.AGICommandFailure( errCode, line ) )
def sendCommand( self, commandString ):
"""(Internal) Send the given command to the other side"""
log.info( "Send Command: %r", commandString )
commandString = commandString.rstrip( '\n' ).rstrip( '\r' )
df = defer.Deferred()
self.pendingMessages.append( df )
self.sendLine( commandString )
return df
def checkFailure( self, result, failure='-1' ):
"""(Internal) Check for a failure-code, raise error if == result"""
# result code may have trailing information...
try:
resultInt,line = result.split( ' ',1)
except ValueError, err:
resultInt = result
if resultInt.strip() == failure:
raise error.AGICommandFailure( FAILURE_CODE, result )
return result
def resultAsInt( self, result ):
"""(Internal) Convert result to an integer value"""
try:
return int(result.strip())
except ValueError, err:
raise error.AGICommandFailure( FAILURE_CODE, result )
def secondResultItem( self, result ):
"""(Internal) Retrieve the second item on the result-line"""
return result.split( ' ',1 )[1]
def resultPlusTimeoutFlag( self, resultLine ):
"""(Internal) Result followed by optional flag declaring timeout"""
try:
digits, timeout = resultLine.split(' ',1)
return digits.strip(), True
except ValueError, err:
return resultLine.strip(), False
def dateAsSeconds( self, date ):
"""(Internal) Convert date to asterisk-compatible format"""
if hasattr( date, 'timetuple' ):
# XXX values seem to be off here...
date = time.mktime(date.timetuple())
elif isinstance( date, time.struct_time ):
date = time.mktime(date)
return date
def onRecordingComplete( self, resultLine ):
"""(Internal) Handle putative success, watch for failure-on-load problems"""
try:
digit,exitType,endposStuff = resultLine.split( ' ', 2 )
except ValueError, err:
pass
else:
digit = int(digit)
exitType = exitType.strip('()')
endposStuff = endposStuff.strip()
if endposStuff.startswith( 'endpos=' ):
endpos = int( endposStuff[7:].strip() )
return digit, exitType, endpos
raise ValueError( """Unexpected result on streaming completion: %r"""%(resultLine ))
    def onStreamingComplete( self,resultLine, skipMS=0 ):
        """(Internal) Handle putative success, watch for failure-on-load problems

        resultLine -- expected form "<digit> endpos=<n>"
        skipMS -- offset originally requested; an end position equal to it
            is "likely" an error (per the wiki) and raises AGICommandFailure

        returns (digit, endpos), or raises ValueError on unparseable input
        """
        try:
            digit,endposStuff = resultLine.split( ' ', 1 )
        except ValueError, err:
            pass
        else:
            digit = int(digit)
            endposStuff = endposStuff.strip()
            if endposStuff.startswith( 'endpos=' ):
                endpos = int( endposStuff[7:].strip() )
                if endpos == skipMS:
                    # "likely" an error according to the wiki, we'll raise an error...
                    raise error.AGICommandFailure( FAILURE_CODE, """End position %s == original position, result code %s"""%(
                        endpos, digit
                    ))
                return digit, endpos
        raise ValueError( """Unexpected result on streaming completion: %r"""%(resultLine ))
    def jumpOnError( self, reason, difference=100, forErrors=None ):
        """On error, jump to original priority+100

        This is intended to be registered as an errBack on a deferred for
        an end-user application. It performs the Asterisk-standard-ish
        jump-on-failure operation, jumping to new priority of
        priority+difference. It also forces return to the same context and
        extension, in case some other piece of code has changed those.

        difference -- priority jump to execute
        forErrors -- if specified, a tuple of error classes to which this
            particular jump is limited (i.e. only errors of this type will
            generate a jump & disconnect)

        returns deferred from the InSequence of operations required to reset
        the address...
        """
        if forErrors:
            if not isinstance( forErrors, (tuple,list)):
                forErrors = (forErrors,)
            # trap() re-raises anything not in forErrors, so unrelated
            # failures propagate past this errback untouched.
            reason.trap( *forErrors )
        # Restore context/extension from the values Asterisk gave us at
        # connection time, then jump the priority and hang up the AGI.
        sequence = InSequence()
        sequence.append( self.setContext, self.variables['agi_context'] )
        sequence.append( self.setExtension, self.variables['agi_extension'] )
        sequence.append( self.setPriority, int(self.variables['agi_priority'])+difference )
        sequence.append( self.finish )
        return sequence()
# End-user API
def finish( self ):
"""Finish the AGI "script" (drop connection)
This command simply drops the connection to the Asterisk server,
which the FastAGI protocol interprets as a successful termination.
Note: There *should* be a mechanism for sending a "result" code,
but I haven't found any documentation for it.
"""
self.transport.loseConnection()
def answer( self ):
"""Answer the channel (go off-hook)
Returns deferred integer response code
"""
return self.sendCommand( "ANSWER" ).addCallback(
self.checkFailure
).addCallback( self.resultAsInt )
def channelStatus( self, channel=None ):
"""Retrieve the current channel's status
Result integers (from the wiki):
0 Channel is down and available
1 Channel is down, but reserved
2 Channel is off hook
3 Digits (or equivalent) have been dialed
4 Line is ringing
5 Remote end is ringing
6 Line is up
7 Line is busy
Returns deferred integer result code
This could be used to decide if we can forward the channel to a given
user, or whether we need to shunt them off somewhere else.
"""
if channel:
command = 'CHANNEL STATUS "%s"'%(channel)
else:
command = "CHANNEL STATUS"
return self.sendCommand( command ).addCallback(
self.checkFailure,
).addCallback( self.resultAsInt )
    def controlStreamFile(
        self, filename, escapeDigits,
        skipMS=0, ffChar='*', rewChar='#', pauseChar=None,
    ):
        """Playback specified file with ability to be controlled by user

        filename -- filename to play (on the asterisk server)
            (don't use file-type extension!)
        escapeDigits -- if provided, presumably the digits which interrupt
            playback (as for streamFile) -- TODO confirm against AGI docs
        skipMS -- number of milliseconds to skip on FF/REW
        ffChar -- if provided, the set of chars that fast-forward
        rewChar -- if provided, the set of chars that rewind
        pauseChar -- if provided, the set of chars that pause playback

        returns deferred (digit,endpos) on success, or errors on failure,
        note that digit will be 0 if no digit was pressed AFAICS
        """
        command = 'CONTROL STREAM FILE "%s" %r %s %r %r'%(
            filename, escapeDigits, skipMS, ffChar, rewChar
        )
        if pauseChar:
            command += ' %r'%( pauseChar )
        # NOTE(review): unlike streamFile, the result line is only checked
        # for failure here; it is not parsed by onStreamingComplete.
        return self.sendCommand( command ).addCallback( self.checkFailure )
def databaseDel( self, family, key ):
"""Delete the given key from the database
Returns deferred integer result code
"""
command = 'DATABASE DEL "%s" "%s"'%( family, key )
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='0',
).addCallback( self.resultAsInt )
def databaseDeltree( self, family, keyTree=None ):
"""Delete an entire family or a tree within a family from database
Returns deferred integer result code
"""
command = 'DATABASE DELTREE "%s"'%(family,)
if keyTree:
command += ' "%s"'%(keytree,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='0',
).addCallback( self.resultAsInt )
def databaseGet( self, family, key ):
"""Retrieve value of the given key from database
Returns deferred string value for the key
"""
command = 'DATABASE GET "%s" "%s"'%(family,key)
def returnValue( resultLine ):
# get the second item without the brackets...
return resultLine[1:-1]
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='0',
).addCallback( self.secondResultItem ).addCallback( returnValue )
def databaseSet( self, family, key, value ):
"""Set value of the given key to database
a.k.a databasePut on the asterisk side
Returns deferred integer result code
"""
command = 'DATABASE PUT "%s" "%s" "%s"'%(family,key, value)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='0',
).addCallback( self.resultAsInt )
databasePut = databaseSet
    def execute( self, application, *options, **kwargs ):
        """Execute a dialplan application with given options

        Note: asterisk calls this "exec", which is Python keyword

        application -- name of the dialplan application to run
        options -- positional values joined into the application's single
            quoted argument string
        comma_delimiter -- Use new style comma delimiter for diaplan
            application arguments. Asterisk uses pipes in 1.4 and older and
            prefers commas in 1.6 and up. Pass comma_delimiter=True to avoid
            warnings from Asterisk 1.6 and up.

        Returns deferred string result for the application, which
        may have failed, result values are application dependant.
        """
        command = '''EXEC "%s"'''%(application)
        if options:
            # comma_delimiter is popped from kwargs so it cannot be
            # mistaken for an application argument.
            if kwargs.pop('comma_delimiter', False) is True:
                delimiter = ","
            else:
                delimiter = "|"
            command += ' "%s"'%(
                delimiter.join([
                    str(x) for x in options
                ])
            )
        # EXEC uses failure code '-2' rather than the usual '-1'.
        return self.sendCommand( command ).addCallback(
            self.checkFailure, failure='-2',
        )
def getData( self, filename, timeout=2.000, maxDigits=None ):
"""Playback file, collecting up to maxDigits or waiting up to timeout
filename -- filename without extension to play
timeout -- timeout in seconds (Asterisk uses milliseconds)
maxDigits -- maximum number of digits to collect
returns deferred (str(digits), bool(timedOut))
"""
timeout *= 1000
command = '''GET DATA "%s" %s'''%(filename, timeout)
if maxDigits is not None:
command = ' '.join([command, str(maxDigits)])
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultPlusTimeoutFlag )
    def getOption( self, filename, escapeDigits, timeout=None ):
        """Playback file, collect 1 digit or timeout (return 0)

        filename -- filename to play
        escapeDigits -- digits which cancel playback/recording
        timeout -- timeout in seconds (Asterisk uses milliseconds)

        returns (chr(option) or '' on timeout, endpos)
        """
        command = '''GET OPTION "%s" %r'''%(filename,escapeDigits)
        if timeout is not None:
            timeout *= 1000
            command += ' %s'%(timeout,)
        def charFirst( (c,position)):
            # onStreamingComplete delivers an int digit code; convert it to
            # the pressed character, mapping the 0 timeout code to ''.
            if not c: # returns 0 on timeout
                c = ''
            else:
                c = chr( c )
            return c,position
        return self.sendCommand( command ).addCallback(
            self.checkFailure,
        ).addCallback(
            self.onStreamingComplete
        ).addCallback( charFirst )
    def getVariable( self, variable ):
        """Retrieve the given channel variable

        From the wiki, variables of interest:

            ACCOUNTCODE -- Account code, if specified
            ANSWEREDTIME -- Time call was answered
            BLINDTRANSFER -- Active SIP channel that dialed the number.
                This will return the SIP Channel that dialed the number when
                doing blind transfers
            CALLERID -- Current Caller ID (name and number) # deprecated?
            CALLINGPRES -- PRI Call ID Presentation variable for incoming calls
            CHANNEL -- Current channel name
            CONTEXT -- Current context name
            DATETIME -- Current datetime in format: DDMMYYYY-HH:MM:SS
            DIALEDPEERNAME -- Name of called party (Broken)
            DIALEDPEERNUMBER -- Number of the called party (Broken)
            DIALEDTIME -- Time number was dialed
            DIALSTATUS -- Status of the call
            DNID -- Dialed Number Identifier (limited apparently)
            EPOCH -- UNIX-style epoch-based time (seconds since 1 Jan 1970)
            EXTEN -- Current extension
            HANGUPCAUSE -- Last hangup return code on a Zap channel connected
                to a PRI interface
            INVALID_EXTEN -- Extension asked for when redirected to the i
                (invalid) extension
            LANGUAGE -- The current language setting. See Asterisk
                multi-language
            MEETMESECS -- Number of seconds user participated in a MeetMe
                conference
            PRIORITY -- Current priority
            RDNIS -- The current redirecting DNIS, Caller ID that redirected
                the call. Limitations apply.
            SIPDOMAIN -- SIP destination domain of an inbound call
                (if appropriate)
            SIP_CODEC -- Used to set the SIP codec for a call (apparently
                broken in Ver 1.0.1, ok in Ver. 1.0.3 & 1.0.4, not sure about
                1.0.2)
            SIPCALLID -- SIP dialog Call-ID: header
            SIPUSERAGENT -- SIP user agent header (remote agent)
            TIMESTAMP -- Current datetime in the format: YYYYMMDD-HHMMSS
            TXTCIDNAME -- Result of application TXTCIDName
            UNIQUEID -- Current call unique identifier
            TOUCH_MONITOR -- Used for "one touch record" (see features.conf,
                and wW dial flags). If is set on either side of the call then
                that var contains the app_args for app_monitor otherwise the
                default of WAV||m is used

        Returns deferred string value for the key
        """
        def stripBrackets( value ):
            # Value arrives wrapped in bracketing characters; strip them.
            return value.strip()[1:-1]
        command = '''GET VARIABLE "%s"'''%( variable, )
        return self.sendCommand( command ).addCallback(
            self.checkFailure, failure='0',
        ).addCallback( self.secondResultItem ).addCallback( stripBrackets )
def hangup( self, channel=None ):
"""Cause the server to hang up on the channel
Returns deferred integer response code
Note: This command just doesn't seem to work with Asterisk 1.2.1,
connected channels just remain connected.
"""
command = "HANGUP"
if channel is not None:
command += ' "%s"'%(channel)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultAsInt )
def noop( self, message=None ):
"""Send a null operation to the server. Any message sent
will be printed to the CLI.
Returns deferred integer response code
"""
command = "NOOP"
if message is not None: command += ' "%s"' % message
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultAsInt )
    def playback( self, filename, doAnswer=1 ):
        """Playback specified file in foreground

        filename -- filename to play
        doAnswer -- whether to:
            -1: skip playback if the channel is not answered
            0: playback the sound file without answering first
            1: answer the channel before playback, if not yet answered

        Note: this just wraps the execute method to issue
        a PLAYBACK command.

        Returns deferred integer response code
        """
        try:
            option = { -1:'skip', 0:'noanswer', 1:'answer' }[ doAnswer ]
        except KeyError:
            raise TypeError, "doAnswer accepts values -1, 0, 1 only (%s given)" % doAnswer
        command = 'PLAYBACK "%s"' %( filename, )
        if option:
            command += ' "%s"' %( option, )
        # NOTE(review): the fully-quoted command string is handed to
        # execute(), which wraps it in EXEC "..." again -- confirm quoting
        # behaves as intended on the target Asterisk version.
        return self.execute( command ).addCallback(
            self.checkFailure, failure='-1',
        ).addCallback( self.resultAsInt )
def receiveChar( self, timeout=None ):
"""Receive a single text char on text-supporting channels (rare)
timeout -- timeout in seconds (Asterisk uses milliseconds)
returns deferred (char, bool(timeout))
"""
command = '''RECEIVE CHAR'''
if timeout is not None:
timeout *= 1000
command += ' %s'%(timeout,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultPlusTimeoutFlag )
def receiveText( self, timeout=None ):
"""Receive text until timeout
timeout -- timeout in seconds (Asterisk uses milliseconds)
Returns deferred string response value (unaltered)
"""
command = '''RECEIVE TEXT'''
if timeout is not None:
timeout *= 1000
command += ' %s'%(timeout,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
)
def recordFile(
self, filename, format, escapeDigits, timeout=-1,
offsetSamples=None, beep=True, silence=None,
):
"""Record channel to given filename until escapeDigits or silence
filename -- filename on the server to which to save
format -- encoding format in which to save data
escapeDigits -- digits which end recording
timeout -- maximum time to record in seconds, -1 gives infinite
(Asterisk uses milliseconds)
offsetSamples -- move into file this number of samples before recording?
XXX check semantics here.
beep -- if true, play a Beep on channel to indicate start of recording
silence -- if specified, silence duration to trigger end of recording
returns deferred (str(code/digits), typeOfExit, endpos)
Where known typeOfExits include:
hangup, code='0'
dtmf, code=digits-pressed
timeout, code='0'
"""
timeout *= 1000
command = '''RECORD FILE "%s" "%s" %s %s'''%(
filename, format, escapeDigits, timeout,
)
if offsetSamples is not None:
command += ' %s'%(offsetSamples,)
if beep:
command += ' BEEP'
if silence is not None:
command += ' s=%s'%(silence,)
def onResult( resultLine ):
value, type, endpos = resultLine.split(' ')
type = type.strip()[1:-1]
endpos = int(endpos.split('=')[1])
return (value, type, endpos)
return self.sendCommand( command ).addCallback(
self.onRecordingComplete
)
def sayXXX( self, baseCommand, value, escapeDigits='' ):
"""Underlying implementation for the common-api sayXXX functions"""
command = '%s %s %r'%( baseCommand, value, escapeDigits or '' )
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultAsInt )
def sayAlpha( self, string, escapeDigits=None ):
"""Spell out character string to the user until escapeDigits
returns deferred 0 or the digit pressed
"""
string = "".join([x for x in string if x.isalnum()])
return self.sayXXX( 'SAY ALPHA', string, escapeDigits )
def sayDate( self, date, escapeDigits=None ):
"""Spell out the date (with somewhat unnatural form)
See sayDateTime with format 'ABdY' for a more natural reading
returns deferred 0 or digit-pressed as integer
"""
return self.sayXXX( 'SAY DATE', self.dateAsSeconds(date), escapeDigits )
def sayDigits( self, number, escapeDigits=None ):
"""Spell out the number/string as a string of digits
returns deferred 0 or digit-pressed as integer
"""
number = "".join([x for x in str(number) if x.isdigit()])
return self.sayXXX( 'SAY DIGITS', number, escapeDigits )
def sayNumber( self, number, escapeDigits=None ):
"""Say a number in natural form
returns deferred 0 or digit-pressed as integer
"""
number = "".join([x for x in str(number) if x.isdigit()])
return self.sayXXX( 'SAY NUMBER', number, escapeDigits )
def sayPhonetic( self, string, escapeDigits=None ):
"""Say string using phonetics
returns deferred 0 or digit-pressed as integer
"""
string = "".join([x for x in string if x.isalnum()])
return self.sayXXX( 'SAY PHONETIC', string, escapeDigits )
    def sayTime( self, time, escapeDigits=None ):
        """Read the given time aloud until escapeDigits

        time -- datetime, struct_time or seconds-since-epoch (note the
            parameter shadows the module-level ``time`` import)

        returns deferred 0 or digit-pressed as integer
        """
        return self.sayXXX( 'SAY TIME', self.dateAsSeconds(time), escapeDigits )
    def sayDateTime( self, time, escapeDigits='', format=None, timezone=None ):
        """Say given date/time in given format until escapeDigits

        time -- datetime or float-seconds-since-epoch
        escapeDigits -- digits to cancel playback
        format -- strftime-style format for the date to be read
            'filename' -- filename of a soundfile (single ticks around the filename required)
            A or a -- Day of week (Saturday, Sunday, ...)
            B or b or h -- Month name (January, February, ...)
            d or e -- numeric day of month (first, second, ..., thirty-first)
            Y -- Year
            I or l -- Hour, 12 hour clock
            H -- Hour, 24 hour clock (single digit hours preceded by "oh")
            k -- Hour, 24 hour clock (single digit hours NOT preceded by "oh")
            M -- Minute
            P or p -- AM or PM
            Q -- "today", "yesterday" or ABdY (*note: not standard strftime value)
            q -- "" (for today), "yesterday", weekday, or ABdY (*note: not standard strftime value)
            R -- 24 hour time, including minute

            Default format is "ABdY 'digits/at' IMp"
        timezone -- optional timezone name from /usr/share/zoneinfo

        returns deferred 0 or digit-pressed as integer
        """
        command = 'SAY DATETIME %s %r'%( self.dateAsSeconds(time),escapeDigits )
        if format is not None:
            command += ' %s'%(format,)
        if timezone is not None:
            # Timezone can only be given when a format was also given
            # (arguments are positional on the Asterisk side).
            command += ' %s'%(timezone,)
        return self.sendCommand( command ).addCallback(
            self.checkFailure, failure='-1',
        ).addCallback( self.resultAsInt )
def sendImage( self, filename ):
"""Send image on those channels which support sending images (rare)
returns deferred integer result code
"""
command = 'SEND IMAGE "%s"'%(filename,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultAsInt )
def sendText( self, text ):
"""Send text on text-supporting channels (rare)
returns deferred integer result code
"""
command = "SEND TEXT %r"%( text )
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback( self.resultAsInt )
def setAutoHangup( self, time ):
"""Set channel to automatically hang up after time seconds
time -- time in seconds in the future to hang up...
returns deferred integer result code
"""
command = """SET AUTOHANGUP %s"""%(time,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1', # docs don't show a failure case, actually
).addCallback( self.resultAsInt )
def setCallerID( self, number ):
"""Set channel's caller ID to given number
returns deferred integer result code
"""
command = "SET CALLERID %s"%( number )
return self.sendCommand( command ).addCallback( self.resultAsInt )
def setContext( self, context ):
"""Move channel to given context (no error checking is performed)
returns deferred integer result code
"""
command = """SET CONTEXT %s"""%( context, )
return self.sendCommand( command ).addCallback( self.resultAsInt )
def setExtension( self, extension ):
"""Move channel to given extension (or 'i' if invalid) or drop if neither there
returns deferred integer result code
"""
command = """SET EXTENSION %s"""%( extension, )
return self.sendCommand( command ).addCallback( self.resultAsInt )
def setMusic( self, on=True, musicClass=None ):
"""Enable/disable and/or choose music class for channel's music-on-hold
returns deferred integer result code
"""
command = """SET MUSIC %s"""%( ['OFF','ON'][on], )
if musicClass is not None:
command += " %s"%(musicClass,)
return self.sendCommand( command ).addCallback( self.resultAsInt )
def setPriority( self, priority ):
"""Move channel to given priority or drop if not there
returns deferred integer result code
"""
command = """SET PRIORITY %s"""%( priority, )
return self.sendCommand( command ).addCallback( self.resultAsInt )
def setVariable( self, variable,value ):
"""Set given channel variable to given value
variable -- the variable name passed to the server
value -- the variable value passed to the server, will have
any '"' characters removed in order to allow for " quoting
of the value.
returns deferred integer result code
"""
value = '''"%s"'''%( str(value).replace( '"', '' ), )
command = 'SET VARIABLE "%s" "%s"'%( variable, value )
return self.sendCommand( command ).addCallback( self.resultAsInt )
    def streamFile( self, filename, escapeDigits="", offset=0 ):
        """Stream given file until escapeDigits starting from offset

        filename -- filename (no extension) on the Asterisk server
        escapeDigits -- digits which interrupt the stream
        offset -- position from which to start playback (default 0)

        returns deferred (str(digit), int(endpos)) for playback

        Note: streamFile is apparently unstable in AGI, may want to use
        execute( 'PLAYBACK', ... ) instead (according to the Wiki)
        """
        command = 'STREAM FILE "%s" %r'%(filename,escapeDigits)
        if offset is not None:
            command += ' %s'%(offset)
        # Passing skipMS=offset lets onStreamingComplete flag results where
        # the end position equals the start, a likely failure-to-play.
        return self.sendCommand( command ).addCallback(
            self.checkFailure, failure='-1',
        ).addCallback( self.onStreamingComplete, skipMS=offset )
def tddMode( self, on=True ):
"""Set TDD mode on the channel if possible (ZAP only ATM)
on -- ON (True), OFF (False) or MATE (None)
returns deferred integer result code
"""
if on is True:
on = 'ON'
elif on is False:
on = 'OFF'
elif on is None:
on = 'MATE'
command = 'TDD MODE %s'%(on,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1', # failure
).addCallback(
self.checkFailure, failure='0', # planned eventual failure case (not capable)
).addCallback(
self.resultAsInt,
)
def verbose( self, message, level=None ):
"""Send a logging message to the asterisk console for debugging etc
message -- text to pass
level -- 1-4 denoting verbosity level
returns deferred integer result code
"""
command = 'VERBOSE %r'%(message,)
if level is not None:
command += ' %s'%(level)
return self.sendCommand( command ).addCallback(
self.resultAsInt,
)
def waitForDigit( self, timeout ):
"""Wait up to timeout seconds for single digit to be pressed
timeout -- timeout in seconds or -1 for infinite timeout
(Asterisk uses milliseconds)
returns deferred 0 on timeout or digit
"""
timeout *= 1000
command = "WAIT FOR DIGIT %s"%(timeout,)
return self.sendCommand( command ).addCallback(
self.checkFailure, failure='-1',
).addCallback(
self.resultAsInt,
)
def wait( self, duration ):
"""Wait for X seconds (just a wrapper around callLater, doesn't talk to server)
returns deferred which fires some time after duration seconds have
passed
"""
df = defer.Deferred( )
reactor.callLater( duration, df.callback, 0 )
return df
class InSequence( object ):
    """Single-shot item creating a set of actions to run in sequence

    Usage: append() (function, args, named) actions, then call the
    instance once; the returned deferred fires with the list of all
    action results, or errbacks on the first action failure (with the
    partial results attached to the failure).
    """
    def __init__( self ):
        self.actions = []  # queued (function, args, named) triples
        self.results = []  # results recorded from completed actions
        self.finalDF = None  # NOTE(review): unused; completion deferred is local to _doSequence
    def append( self, function, *args, **named ):
        """Append an action to the set of actions to process"""
        self.actions.append( (function, args, named) )
    def __call__( self ):
        """Return deferred that fires when we are finished processing all items"""
        return self._doSequence( )
    def _doSequence( self ):
        """Return a deferred that does each action in sequence"""
        finalDF = defer.Deferred()
        # Kick off the chain; each success schedules the next action.
        self.onActionSuccess( None, finalDF=finalDF )
        return finalDF
    def recordResult( self, result ):
        """Record the result for later"""
        self.results.append( result )
        return result
    def onActionSuccess( self, result, finalDF ):
        """Handle individual-action success"""
        log.debug( 'onActionSuccess: %s', result )
        if self.actions:
            # Pop the next queued action; maybeDeferred accepts both
            # synchronous and deferred-returning functions.
            action = self.actions.pop(0)
            log.debug( 'action %s', action )
            df = defer.maybeDeferred( action[0], *action[1], **action[2] )
            df.addCallback( self.recordResult )
            df.addCallback( self.onActionSuccess, finalDF=finalDF )
            df.addErrback( self.onActionFailure, finalDF=finalDF )
            return df
        else:
            # All actions completed: fire the overall deferred.
            finalDF.callback( self.results )
    def onActionFailure( self, reason, finalDF ):
        """Handle individual-action failure"""
        log.debug( 'onActionFailure' )
        # Attach partial results so callers can see progress made so far.
        reason.results = self.results
        finalDF.errback( reason )
class FastAGIFactory( protocol.Factory ):
    """Factory producing FastAGI server protocol instances"""
    # Twisted instantiates this protocol class once per connection.
    protocol = FastAGIProtocol
    def __init__( self, mainFunction ):
        """Initialise the factory

        mainFunction -- function taking a connected FastAGIProtocol
            instance; this is the function that's run when the Asterisk
            server connects.
        """
        self.mainFunction = mainFunction
| {
"repo_name": "arjan/starpy",
"path": "starpy/fastagi.py",
"copies": "1",
"size": "36232",
"license": "bsd-3-clause",
"hash": 6110702939346550000,
"line_mean": 40.5504587156,
"line_max": 125,
"alpha_frac": 0.5924597041,
"autogenerated": false,
"ratio": 4.522216674987519,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5614676379087519,
"avg_score": null,
"num_lines": null
} |
"""Asterisk Manager Interface for the Twisted networking framework
The Asterisk Manager Interface is a simple line-oriented protocol that allows
for basic control of the channels active on a given Asterisk server.
Module defines a standard Python logging module log 'AMI'
"""
from twisted.internet import protocol, reactor, defer
from twisted.protocols import basic
from twisted.internet import error as tw_error
import socket, logging
from starpy import error
log = logging.getLogger('AMI')
class AMIProtocol(basic.LineOnlyReceiver):
    """Protocol for the interfacing with the Asterisk Manager Interface (AMI)

    Provides most of the AMI Action interfaces.
    Auto-generates ActionID fields for all calls.

    Events and messages are passed around as simple dictionaries with
    all-lowercase keys.  Values are case-sensitive.

    XXX Want to allow for timeouts

    Attributes:
        count -- total count of messages sent from this protocol
        hostName -- used along with count and ID to produce unique IDs
        messageCache -- stores incoming message fragments from the manager
        id -- An identifier for this instance
    """
    count = 0
    amiVersion = None
    id = None
    def __init__(self, *args, **named):
        """Initialise the AMIProtocol, arguments are ignored"""
        self.messageCache = []        # lines of the message currently being received
        self.actionIDCallbacks = {}   # actionid -> callback for in-flight actions
        self.eventTypeCallbacks = {}  # event name (or None wildcard) -> handler list
        self.hostName = socket.gethostname()
    def registerEvent(self, event, function):
        """Register callback for the given event-type

        event -- string name for the event, None to match all events, or
            a tuple of string names to match multiple events.

            See http://www.voip-info.org/wiki/view/asterisk+manager+events
            for the list of events and the data they bear.  Includes:
                Newchannel (note that you can receive multiple Newchannel
                events for a single channel!), Hangup, Newexten, Newstate,
                Reload, Shutdown, ExtensionStatus, Rename, Newcallerid,
                Alarm, AlarmClear, Agentcallbacklogoff, Agentcallbacklogin,
                Agentlogin, Agentlogoff, MeetmeJoin, MeetmeLeave,
                MessageWaiting, Join, Leave, AgentCalled, ParkedCall,
                UnParkedCall, ParkedCalls, Cdr, ParkedCallsComplete,
                QueueParams, QueueMember
            among other standard events.  Also includes user-defined events.
        function -- function taking (protocol,event) as arguments or None
            to deregister the current function.

        Multiple functions may be registered for a given event
        """
        log.debug('Registering function %s to handle events of type %r', function, event)
        # Normalise a single event name (or the None wildcard) to a 1-tuple
        if isinstance(event, (str,unicode,type(None))):
            event = (event,)
        for ev in event:
            self.eventTypeCallbacks.setdefault(ev, []).append(function)
    def deregisterEvent(self, event, function=None):
        """Deregister callback for the given event-type

        event -- event name (or names) to be deregistered, see registerEvent
        function -- the function to be removed from the callbacks or None to
            remove all callbacks for the event

        returns success boolean (False if any event/function was not found)
        """
        log.debug('Deregistering handler %s for events of type %r', function, event)
        if isinstance(event, (str,unicode,type(None))):
            event = (event,)
        success = True
        for ev in event:
            try:
                # NOTE: this local name shadows the builtin set()
                set = self.eventTypeCallbacks[ ev ]
            except KeyError, err:
                success = False
            else:
                try:
                    # remove every registration of this function
                    while function in set:
                        set.remove(function)
                except (ValueError,KeyError), err:
                    success = False
                if not set or function is None:
                    # drop the now-empty (or wholesale-cleared) registration list
                    try:
                        del self.eventTypeCallbacks[ ev ]
                    except KeyError, err:
                        success = False
        return success
    def lineReceived(self, line):
        """Handle Twisted's report of an incoming line from the manager

        Lines are buffered until a blank line arrives, which terminates a
        message and triggers dispatch.
        """
        log.debug('Line In: %r', line)
        self.messageCache.append(line)
        if not line.strip():
            self.dispatchIncoming() # does dispatch and clears cache
    def connectionMade(self):
        """Handle connection to the AMI port (auto-login)

        This is a Twisted customisation point, we use it to automatically
        log into the connection we've just established.

        XXX Should probably use proper Twisted-style credential negotiations
        """
        log.info('Connection Made')
        df = self.login()
        def onComplete(message):
            """Check for success, errback or callback as appropriate"""
            if not message['response'] == 'Success':
                log.info('Login Failure: %s', message)
                self.transport.loseConnection()
                self.factory.loginDefer.errback(
                    error.AMICommandFailure("""Unable to connect to manager""", message)
                )
            else:
                # XXX messy here, would rather have the factory trigger its own
                # callback...
                log.info('Login Complete: %s', message)
                self.factory.loginDefer.callback(
                    self,
                )
        def onFailure(reason):
            """Handle failure to connect (e.g. due to timeout)"""
            log.info('Login Call Failure: %s', reason.getTraceback())
            self.transport.loseConnection()
            self.factory.loginDefer.errback(
                reason
            )
        df.addCallbacks(onComplete, onFailure)
    def connectionLost(self, reason):
        """Connection lost, clean up callbacks

        Every in-flight action callback is notified with a ConnectionDone
        failure, then all action and event registrations are dropped so
        nothing fires against the dead connection.
        """
        for key,callable in self.actionIDCallbacks.items():
            try:
                callable(tw_error.ConnectionDone("""FastAGI connection terminated"""))
            except Exception, err:
                log.error("""Failure during connectionLost for callable %s: %s""", callable, err)
        self.actionIDCallbacks.clear()
        self.eventTypeCallbacks.clear()
    # Banner prefix the manager sends on connect; used to sniff the AMI version
    VERSION_PREFIX = 'Asterisk Call Manager'
    # Sentinel marking the final line of multi-line command output
    END_DATA = '--END COMMAND--'
    def dispatchIncoming(self):
        """Dispatch any finished incoming events/messages

        Drains messageCache into a key/value dictionary, then routes the
        message to the actionid callback (if it answers an action we sent)
        and/or to registered event handlers.
        """
        log.debug('Dispatch Incoming')
        message = {}
        while self.messageCache:
            line = self.messageCache.pop(0)
            line = line.strip()
            if line:
                if line.endswith(self.END_DATA):
                    # multi-line command results, collected under the ' ' key...
                    message.setdefault(' ', []).extend([
                        l for l in line.split('\n') if (l and l!=self.END_DATA)
                    ])
                else:
                    # regular line...
                    if line.startswith(self.VERSION_PREFIX):
                        # the +1 skips the '/' between prefix and version number
                        self.amiVersion = line[len(self.VERSION_PREFIX)+1:].strip()
                    else:
                        try:
                            key,value = line.split(':',1)
                        except ValueError, err:
                            # XXX data-safety issues, what prevents the VERSION_PREFIX from
                            # showing up in a data-set?
                            log.warn("""Improperly formatted line received and ignored: %r""", line)
                        else:
                            message[ key.lower().strip() ] = value.strip()
        log.debug('Incoming Message: %s', message)
        if message.has_key('actionid'):
            key = message['actionid']
            callback = self.actionIDCallbacks.get(key)
            if callback:
                try:
                    callback(message)
                except Exception, err:
                    # XXX log failure here...
                    pass
        # otherwise is a monitor message or something we didn't send...
        if message.has_key('event'):
            self.dispatchEvent(message)
    def dispatchEvent(self, event):
        """Given an incoming event, dispatch to registered handlers

        Handlers registered for the specific event name run first, then
        handlers registered under None (the wildcard).  A handler exception
        is logged and does not stop dispatch to the remaining handlers.
        """
        for key in (event['event'], None):
            try:
                handlers = self.eventTypeCallbacks[ key ]
            except KeyError, err:
                pass
            else:
                for handler in handlers:
                    try:
                        handler(self, event)
                    except Exception, err:
                        # would like the getException code here...
                        log.error(
                            'Exception in event handler %s on event %s: %s',
                            handler, event, err
                        )
    def generateActionId(self):
        """Generate a unique action ID

        Assumes that hostName must be unique among all machines which talk
        to a given AMI server. With that is combined the memory location of
        the protocol object (which should be machine-unique) and the count of
        messages that this manager has created so far.

        Generally speaking, you shouldn't need to know the action ID, as the
        protocol handles the management of them automatically.
        """
        self.count += 1
        return '%s-%s-%s'%(self.hostName,id(self),self.count)
def sendDeferred(self, message):
"""Send with a single-callback deferred object
Returns deferred that fires when a response to this message is received
"""
df = defer.Deferred()
actionid = self.sendMessage(message, df.callback)
df.addCallbacks(
self.cleanup, self.cleanup,
callbackArgs=(actionid,), errbackArgs=(actionid,)
)
return df
def cleanup(self, result, actionid):
"""Cleanup callbacks on completion"""
try:
del self.actionIDCallbacks[actionid]
except KeyError, err:
pass
return result
def sendMessage(self, message, responseCallback=None):
"""Send the message to the other side, return deferred for the result
returns the actionid for the message
"""
message = dict([(k.lower(),v) for (k,v) in message.items()])
if not message.has_key('actionid'):
message['actionid'] = self.generateActionId()
if responseCallback:
self.actionIDCallbacks[message['actionid']] = responseCallback
log.debug("""MSG OUT: %s""", message)
for key,value in message.items():
self.sendLine('%s: %s'%(str(key.lower()),str(value)))
self.sendLine('')
return message['actionid']
    def collectDeferred(self, message, stopEvent):
        """Collect all responses to this message until stopEvent or error

        returns deferred returning sequence of events/responses
        """
        df = defer.Deferred()
        cache = []
        def onEvent(event):
            # NOTE(review): assumes every non-error message carries an
            # 'event' key; a plain response without one raises KeyError,
            # which the dispatcher silently swallows -- confirm for each
            # action routed through this helper
            if event.get('response') == 'Error':
                df.errback(error.AMICommandFailure(event))
            elif event['event'] == stopEvent:
                df.callback(cache)
            else:
                cache.append(event)
        actionid = self.sendMessage(message, onEvent)
        df.addCallbacks(
            self.cleanup, self.cleanup,
            callbackArgs=(actionid,), errbackArgs=(actionid,)
        )
        return df
def errorUnlessResponse(self, message, expected='Success'):
"""Raise a AMICommandFailure error unless message['response'] == expected
If == expected, returns the message
"""
if message['response'] != expected:
raise error.AMICommandFailure(message)
return message
    ## End-user API
    def absoluteTimeout(self, channel, timeout):
        """Set timeout value for the given channel (in seconds)"""
        message = {'action':'absolutetimeout','timeout':timeout,'channel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def agentLogoff(self,agent,soft):
        """Logs off the specified agent for the queue system.

        soft -- True/'yes'/1 requests a soft logoff
        """
        # Coerce the flag to the literal strings the AMI protocol expects
        if soft in (True,'yes',1):
            soft='true'
        else:
            soft='false'
        message = {'Action':'AgentLogoff','Agent':agent,'Soft':soft}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def agents(self):
        """Retrieve agents information (events collected until AgentsComplete)"""
        message = {"action":"agents"}
        return self.collectDeferred(message, "AgentsComplete")
    def changeMonitor(self, channel, filename):
        """Change the file to which the channel is to be recorded"""
        message = {'action':'changemonitor','channel':channel,'filename':filename}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
def command(self, command):
"""Run asterisk console command, return deferred result for line of lines
returns deferred returning list of lines (strings) of the command
output.
See listCommands to see available commands
"""
df = self.sendDeferred({'action': 'command', 'command':command })
df.addCallback(self.errorUnlessResponse, expected='Follows')
def onResult(message):
return message[' ']
return df.addCallback(onResult)
def dbGet(self,family,key):
"""This action retrieves a value from the AstDB database"""
df = defer.Deferred()
def extractValue(ami,event):
value = event['val']
return df.callback(value)
message = {'Action':'DBGet','family':family,'key':key}
self.sendDeferred(message).addCallback(self.errorUnlessResponse)
self.registerEvent("DBGetResponse",extractValue)
return df
    def dbPut(self,family,key,value):
        """Sets a key value in the AstDB database"""
        message = {'Action':'DBPut','Family':family,'Key':key,'Val':value}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def events(self, eventmask=False):
        """Determine whether events are generated

        Booleans are coerced to the 'on'/'off' strings AMI expects; any
        other value is passed through unchanged as an event-type mask.
        """
        if eventmask in ('off',False,0):
            eventmask = 'off'
        elif eventmask in ('on',True,1):
            eventmask = 'on'
        # otherwise is likely a type-mask
        message = {'action':'events','eventmask':eventmask}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def extensionState(self,exten,context):
        """This command reports the extension state for the given extension.
        If the extension has a hint, this will report the status of the
        device connected to the extension.

        The following are the possible extension states:
        -2 Extension removed
        -1 Extension hint not found
        0 Idle
        1 In use
        2 Busy"""
        message = {'Action':'ExtensionState','Exten':exten,'Context':context}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def getConfig(self,filename):
        """Retrieves the data from an Asterisk configuration file"""
        message = {'Action':'GetConfig','filename':filename}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def getVar(self, channel, variable):
        """Retrieve the given variable from the channel"""
        def extractVariable(message):
            """When message comes in, extract the variable from it"""
            # The value may arrive keyed by the variable's own name or
            # under a generic 'value' key; accept either
            if message.has_key(variable.lower()):
                value = message[variable.lower()]
            elif message.has_key('value'):
                value = message['value']
            else:
                raise error.AMICommandFailure(message)
            if value == '(null)':
                # Asterisk's textual representation of an unset variable
                value = None
            return value
        message = {'action':'getvar','channel':channel,'variable':variable}
        return self.sendDeferred(
            message
        ).addCallback(
            self.errorUnlessResponse
        ).addCallback(
            extractVariable,
        )
    def hangup(self, channel):
        """Tell channel to hang up"""
        message = {'action':'hangup','channel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def login(self):
        """Log into the AMI interface (done automatically on connection)

        Uses factory.username and factory.secret
        """
        self.id = self.factory.id
        return self.sendDeferred({
            'action': 'login',
            'username': self.factory.username,
            'secret': self.factory.secret,
        }).addCallback(self.errorUnlessResponse)
    def listCommands(self):
        """List the set of commands available

        Returns a single message with each command-name as a key
        """
        message = {'action':'listcommands',}
        def removeActionId(message):
            # the actionid key is protocol bookkeeping, not a command name
            try:
                del message['actionid']
            except KeyError, err:
                pass
            return message
        return self.sendDeferred(message).addCallback(
            self.errorUnlessResponse
        ).addCallback(
            removeActionId
        )
    def logoff(self):
        """Log off from the manager instance

        AMI answers Logoff with 'Goodbye' rather than 'Success'.
        """
        message = {'action':'logoff'}
        return self.sendDeferred(message).addCallback(
            self.errorUnlessResponse, expected = 'Goodbye',
        )
    def mailboxCount(self, mailbox):
        """Get count of messages in the given mailbox"""
        message = {'action':'mailboxcount','mailbox':mailbox}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def mailboxStatus(self, mailbox):
        """Get status of given mailbox"""
        message = {'action':'mailboxstatus','mailbox':mailbox}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def meetmeMute(self, meetme, usernum):
        """Mute a user in a given meetme

        NOTE(review): unlike most wrappers here this does not chain
        errorUnlessResponse, so failures arrive as plain response messages
        """
        message = {'action':'MeetMeMute','meetme':meetme,'usernum':usernum}
        return self.sendDeferred(message)
    def meetmeUnmute(self,meetme,usernum):
        """Unmute a specified user in a given meetme (see meetmeMute note)"""
        message = {'action':'meetmeunmute','meetme':meetme,'usernum':usernum}
        return self.sendDeferred(message)
    def monitor(self, channel, file, format, mix):
        """Record given channel to a file (or attempt to anyway)"""
        message = {'action':'monitor','channel':channel,'file':file,'format':format, 'mix':mix}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def originate(
        self, channel, context=None, exten=None, priority=None,
        timeout=None, callerid=None, account=None, application=None,
        data=None, variable={}, async=False
    ):
        """Originate call to connect channel to given context/exten/priority

        channel -- the outgoing channel to which will be dialed
        context/exten/priority -- the dialplan coordinate to which to connect
            the channel (i.e. where to start the called person)
        timeout -- duration before timeout in seconds (note: not Asterisk standard!)
        callerid -- callerid to display on the channel
        account -- account to which the call belongs
        application -- alternate application to Dial to use for outbound dial
        data -- data to pass to application
        variable -- variables associated to the call
        async -- make the origination asynchronous

        NOTE: 'async' is a reserved word from Python 3.7 on; this module is
        Python 2 code.  The mutable default for 'variable' is harmless only
        because it is immediately re-bound (never mutated) below.
        """
        variable = '|'.join([ "%s=%s" %(x[0], x[1]) for x in variable.items() ])
        # Drop unset (None) parameters so they are not sent at all
        message = dict([(k,v) for (k,v) in {
            'action': 'originate',
            'channel':channel,'context':context,'exten':exten,'priority':priority,
            'timeout':timeout,'callerid':callerid,'account':account,'application':application,
            'data':data, 'variable':variable, 'async':str(async)
        }.items() if v is not None])
        # public API takes seconds, the manager expects milliseconds
        if message.has_key('timeout'):
            message['timeout'] = message['timeout']*1000
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def park(self, channel,channel2,timeout):
        """Park channel"""
        message = {'action':'park','channel':channel,'channel2':channel2,'timeout':timeout}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def parkedCall(self):
        """Check for a ParkedCall event"""
        message = {'action' : 'ParkedCall'}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def unParkedCall(self):
        """Check for an UnParkedCall event"""
        message = {'action' : 'UnParkedCall'}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def parkedCalls(self):
        """Retrieve set of parked calls via multi-event callback"""
        message = {'action':'ParkedCalls'}
        return self.collectDeferred(message, 'ParkedCallsComplete')
    def pauseMonitor(self,channel):
        """Temporarily stop recording the channel"""
        message = {'action':'pausemonitor','channel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def ping(self):
        """Check to see if the manager is alive...

        AMI 1.0 answers Ping with 'Pong'; other versions with 'Success'.
        """
        message = {'action':'ping'}
        if self.amiVersion == "1.0":
            return self.sendDeferred(message).addCallback(
                self.errorUnlessResponse, expected = 'Pong',
            )
        else:
            return self.sendDeferred(message).addCallback(
                self.errorUnlessResponse
            )
    def playDTMF(self, channel, digit):
        """Play DTMF on a given channel"""
        message = {'action':'playdtmf','channel':channel,'digit':digit}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def queueAdd(self, queue, interface, penalty=0, paused=True):
        """Add given interface to named queue (paused by default)"""
        # Coerce the flag to the literal strings AMI expects
        if paused in (True,'true',1):
            paused = 'true'
        else:
            paused = 'false'
        message = {'action':'queueadd','queue':queue,'interface':interface,'penalty':penalty, 'paused':paused}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def queuePause(self,queue, interface, paused = True):
        """Pause (or unpause) the given interface in the named queue"""
        if paused in (True,'true',1):
            paused = 'true'
        else:
            paused = 'false'
        message = {'action':'queuepause','queue':queue,'interface':interface,'paused':paused}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def queueRemove(self, queue, interface):
        """Remove given interface from named queue"""
        message = {'action':'queueremove','queue':queue,'interface':interface}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def queues(self):
        """Retrieve information about active queues via multiple events"""
        # XXX AMI returns improperly formatted lines so this doesn't work now.
        message = {'action':'queues'}
        #return self.collectDeferred(message, 'QueueStatusEnd')
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def queueStatus(self):
        """Retrieve information about active queues via multiple events"""
        message = {'action':'queuestatus'}
        return self.collectDeferred(message, 'QueueStatusComplete')
    def redirect(self, channel, context, exten, priority, extraChannel=None):
        """Transfer channel(s) to given context/exten/priority

        extraChannel -- optional second channel to transfer along with the first
        """
        message = {
            'action':'redirect','channel':channel,'context':context,
            'exten':exten,'priority':priority,
        }
        if extraChannel is not None:
            message['extrachannel'] = extraChannel
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
def setCDRUserField(self, channel, userField, append=True):
"""Set/add to a user field in the CDR for given channel"""
if append in (True,'true',1):
append = 'true'
else:
append = 'false'
message = {
'channel':channel,'userfield':userField,'append':append,
}
return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def setVar(self, channel, variable, value):
        """Set channel variable to given value"""
        message = {'action':'setvar','channel':channel,'variable':variable,'value':value}
        return self.sendDeferred(
            message
        ).addCallback(
            self.errorUnlessResponse
        )
    def sipPeers(self):
        """List all known sip peers"""
        # XXX not available on my box...
        message = {'action':'sippeers'}
        return self.collectDeferred(message, 'PeerlistComplete')
    def sipShowPeers(self, peer):
        """Retrieve detail for a single SIP peer"""
        message = {'action':'sipshowpeer','peer':peer}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def status(self, channel=None):
        """Retrieve status for the given (or all) channels via multi-event callback

        channel -- channel name or None to retrieve all channels

        returns deferred returning list of Status Events for each requested
        channel
        """
        message = {'action':'Status'}
        if channel:
            message['channel'] = channel
        return self.collectDeferred(message, 'StatusComplete')
def stopMonitor(self, channel):
"""Stop monitoring the given channel"""
message = {'action':'monitor','channel':channel,}
return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def unpauseMonitor(self, channel):
        """Resume recording a channel"""
        message = {'action':'unpausemonitor','channel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def updateConfig(self, srcfile, dstfile, reload, headers={}):
        """Update a configuration file

        headers should be a dictionary with the following keys
        Action-XXXXXX
        Cat-XXXXXX
        Var-XXXXXX
        Value-XXXXXX
        Match-XXXXXX
        """
        message = {}
        # Coerce the reload flag to the 'yes'/'no' strings AMI expects
        if reload in (True,'yes',1):
            reload='yes'
        else:
            reload='no'
        message = {'action':'updateconfig','srcfilename':srcfile,'dstfilename':dstfile,'reload':reload}
        for k,v in headers.items():
            message[k] = v
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def userEvent(self,event,**headers):
        """Sends an arbitrary event to the Asterisk Manager Interface.

        NOTE(review): returns the actionid from sendMessage, not a Deferred.
        """
        message = {'Action':'UserEvent','userevent':event}
        for i,j in headers.items():
            message[i] =j
        return self.sendMessage(message)
    def waitEvent(self,timeout):
        """Waits for an event to occur

        After calling this action, Asterisk will send you a Success response
        as soon as another event is queued by the AMI
        """
        message={'action':'WaitEvent','timeout':timeout}
        return self.collectDeferred(message, 'WaitEventComplete')
def zapDNDoff(self,channel):
"""Toggles the do not disturb state on the specified Zap channel to off"""
messge = {'action':'zapDNDoff','channel':channel}
return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
def zapDNDon(self,channel):
"""Toggles the do not disturb state on the specified Zap channel to on"""
messge = {'action':'zapDNDon','channel':channel}
return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def zapDialOffhook(self,channel,number):
        """Dials the specified number on the Zap channel while the phone is off-hook"""
        message = {'Action':'ZapDialOffhook','ZapChannel':channel,'Number':number}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def zapHangup(self,channel):
        """Hangs up the specified Zap channel"""
        message = {'Action':'ZapHangup','ZapChannel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def zapRestart(self,channel):
        """Completely restarts the Zaptel channels, terminating any calls in progress"""
        message = {'Action':'ZapRestart','ZapChannel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
    def zapShowChannels(self):
        """List all zap channels"""
        message = {'action':'zapshowchannels'}
        return self.collectDeferred(message, 'ZapShowChannelsComplete')
    def zapTransfer(self,channel):
        """Transfers zap channel"""
        message = {'Action':'ZapTransfer','channel':channel}
        return self.sendDeferred(message).addCallback(self.errorUnlessResponse)
class AMIFactory(protocol.ClientFactory):
"""A factory for AMI protocols
"""
protocol = AMIProtocol
def __init__(self, username, secret, id=None):
self.username = username
self.secret = secret
self.id = id
def login(self, ip='localhost', port=5038, timeout=5):
"""Connect, returning our (singleton) protocol instance with login completed
XXX This is messy, we'd much rather have the factory able to create
large numbers of protocols simultaneously
"""
self.loginDefer = defer.Deferred()
reactor.connectTCP(ip, port, self, timeout=timeout)
return self.loginDefer
def clientConnectionFailed(self, connector, reason):
"""Connection failed, report to our callers"""
self.loginDefer.errback(reason)
| {
"repo_name": "arjan/starpy",
"path": "starpy/manager.py",
"copies": "1",
"size": "30059",
"license": "bsd-3-clause",
"hash": -1177239257429791700,
"line_mean": 42.5637681159,
"line_max": 174,
"alpha_frac": 0.6092351708,
"autogenerated": false,
"ratio": 4.657421753951038,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5766656924751038,
"avg_score": null,
"num_lines": null
} |
# Asteroids example game, demonstrating several entity/component design choices.
# Requires Python 3.8+ and the pygame library. Arrow keys and space to control. Esc to quit.
# I suggest playing the game for a minute before looking at the code.
import pygame, math, random, enco
from pygame.locals import *
screensize = 500

def vector(r, theta):
    """Polar-to-rectangular transform; theta = 0 points up the screen (negative y)."""
    return r * math.sin(theta), -r * math.cos(theta)
# POSITION AND MOVEMENT COMPONENTS
# Used by all entities
class PositionVelocity(enco.Component):
    """Entity has a position and velocity, integrated each frame."""
    def __init__(self):
        self.x = self.y = 0
        self.vx = self.vy = 0
    def think(self, dt):
        self.x += self.vx * dt
        self.y += self.vy * dt
    def screenpos(self):
        """Position truncated to integer pixel coordinates."""
        return int(self.x), int(self.y)
# Used by player
class HasMaxSpeed(enco.Component):
    """Entity's speed is clamped to a maximum each frame."""
    def __init__(self, maxspeed):
        self.maxspeed = maxspeed
    def think(self, dt):
        speed = math.hypot(self.vx, self.vy)
        if speed > self.maxspeed:
            # Rescale the velocity vector without changing its direction
            self.vx *= self.maxspeed / speed
            self.vy *= self.maxspeed / speed
# Used by the player, asteroids, and bullets
class WrapScreen(enco.Component):
    """Entity reappears on the opposite side of the screen when it goes off."""
    def think(self, dt):
        self.x %= screensize
        self.y %= screensize
# Used by ufos
class CrossesScreen(enco.Component):
    """Entity spawns at a random point on the left or right edge, travels across the screen
    with constant horizontal velocity, periodically changing vertical velocity, and disappears
    after reaching the opposite edge. Wraps around when going off the top or bottom."""
    def __init__(self, v0, steerperiod):
        self.v0 = v0
        self.steerperiod = steerperiod  # seconds between random steering changes
        self.steertime = 0
    def spawn(self):
        # Enter from one side at random, heading toward the other.
        if random.random() < 0.5:
            self.x, self.vx = 0, self.v0
        else:
            self.x, self.vx = screensize, -self.v0
        self.y = random.uniform(0, screensize)
        self.vy = 0
    def think(self, dt):
        exiting_left = self.x < 0 and self.vx < 0
        exiting_right = self.x > screensize and self.vx > 0
        if exiting_left or exiting_right:
            self.alive = False
        self.y %= screensize
        self.steertime += dt
        if self.steertime > self.steerperiod:
            self.steertime = 0
            self.vy = random.uniform(-self.v0, self.v0)
# Used by player
class SpawnsAtCenter(enco.Component):
    """Entity spawns motionless at the center of the screen."""
    def spawn(self):
        center = screensize / 2
        self.x, self.y = center, center
        self.vx, self.vy = 0, 0
# Used by asteroids
class SpawnsAtEdge(enco.Component):
    """Entity spawns at a random point along the edge of the screen, moving in a random
    direction with the given speed."""
    def __init__(self, v0):
        self.v0 = v0
    def spawn(self):
        # x=0 or y=0 covers all four edges once screen wrap applies.
        if random.random() < 0.5:
            self.x, self.y = 0, random.uniform(0, screensize)
        else:
            self.x, self.y = random.uniform(0, screensize), 0
        self.vx, self.vy = vector(self.v0, random.uniform(0, math.tau))
# Used by player
class RotatesWithArrows(enco.Component):
    """Left/right arrow keys rotate the entity at the given rate (radians/second)."""
    def __init__(self, turnspeed):
        self.turnspeed = turnspeed
        self.angle = 0    # radians, 0 = facing up
        self.turning = 0  # -1 = left, 0 = none, +1 = right
    def control(self, keys):
        self.turning = bool(keys[K_RIGHT]) - bool(keys[K_LEFT])
    def think(self, dt):
        self.angle += self.turning * self.turnspeed * dt
# Used by player
class ThrustsWithUp(enco.Component):
    """Holding the up key accelerates the entity toward its facing angle."""
    def __init__(self, acceleration):
        self.acceleration = acceleration
        self.thrusting = False
    def control(self, keys):
        self.thrusting = keys[K_UP]
    def think(self, dt):
        if not self.thrusting:
            return
        ax, ay = vector(self.acceleration * dt, self.angle)
        self.vx += ax
        self.vy += ay
# WEAPON AND COLLISION COMPONENTS
# Used by ufos and player
class WeaponCooldown(enco.Component):
    """Entity may fire its weapon only after the specified cooldown has elapsed."""
    def __init__(self, cooldowntime):
        self.cooldowntime = cooldowntime
        self.cooldownremaining = 0
    def fire(self):
        # Other components extend fire() with the actual firing effect.
        self.cooldownremaining = self.cooldowntime
    def trytofire(self):
        if not self.cooldownremaining:
            self.fire()
    def think(self, dt):
        self.cooldownremaining = max(self.cooldownremaining - dt, 0)
# Used by ufos and player
class FiresBullets(enco.Component):
    """Entity has the capability of spawning Bullet objects."""
    def __init__(self, bulletspeed):
        self.bulletspeed = bulletspeed
    def fireindirection(self, angle):
        # Muzzle offset puts the bullet just outside the firer's own radius.
        ox, oy = vector(self.size * 1.3, angle)
        pos = self.x + ox, self.y + oy
        vel = vector(self.bulletspeed, angle)
        state.objects.append(Bullet(pos, vel))
# Used by ufos
class FiresRandomDirectionsConstantly(enco.Component):
    """Entity fires toward a random heading whenever its cooldown allows."""
    def fire(self):
        self.fireindirection(random.uniform(0, math.tau))
    def think(self, dt):
        self.trytofire()
# Used by player
class FiresForwardWithSpace(enco.Component):
    """Pressing the space bar fires in the direction the entity is facing."""
    def fire(self):
        self.fireindirection(self.angle)
    def control(self, keys):
        if keys[K_SPACE]:
            self.trytofire()
# Used by asteroids
class SplitsOnCollision(enco.Component):
    """On collision, spawn two asteroids one level smaller at this position.

    Each fragment keeps the parent's velocity plus/minus a random kick of
    magnitude dvsplit."""
    def __init__(self, dvsplit):
        self.dvsplit = dvsplit
    def collide(self):
        if self.level <= 1:  # Level 1 asteroids don't split.
            return
        kx, ky = vector(self.dvsplit, random.uniform(0, math.tau))
        pos = self.x, self.y
        state.objects.append(Asteroid(self.level - 1, pos, (self.vx + kx, self.vy + ky)))
        state.objects.append(Asteroid(self.level - 1, pos, (self.vx - kx, self.vy - ky)))
# Used by ships and asteroids
class ExplodesOnCollision(enco.Component):
    """Entity leaves an Explosion effect behind at its position when it collides."""
    def collide(self):
        state.effects.append(Explosion((self.x, self.y)))
# OTHER LOGICAL COMPONENTS
# Used by bullets and explosions
class Lifetime(enco.Component):
    """Entity removes itself after a fixed span of time."""
    def __init__(self, lifetime):
        self.lifetime = lifetime
        self.timelived = 0
    def think(self, dt):
        self.timelived += dt
        if self.timelived > self.lifetime:
            self.alive = False
# Used by explosions
class Grows(enco.Component):
    """Entity's size steadily increases at the given rate (units/second)."""
    def __init__(self, growthrate):
        self.growthrate = growthrate
        self.size = 0
    def think(self, dt):
        self.size += self.growthrate * dt
# GRAPHICS COMPONENTS
# Used by bullets, asteroids, and explosions
class Circular(enco.Component):
    """Entity is drawn as an outline circle of its current size."""
    def draw(self, surf):
        # Skip sub-pixel circles (e.g. an Explosion that just started growing)
        if self.size >= 1:
            pygame.draw.circle(surf, self.color, self.screenpos(), int(self.size), 1)
# Used by ufos
class Rectangular(enco.Component):
    """Entity is drawn as an outline rectangle, 3 sizes wide by 2 sizes tall."""
    def draw(self, surf):
        w, h = int(3 * self.size), int(2 * self.size)
        rect = pygame.Rect((0, 0, w, h))
        rect.center = self.screenpos()
        pygame.draw.rect(surf, self.color, rect, 1)
# Used by player
class Triangular(enco.Component):
    """Entity is a triangle, oriented in the direction it's facing."""
    def makeimg(self):
        # Pre-render the triangle once onto a 4s x 4s surface; draw() then
        # only rotates this cached image.
        # NOTE(review): the fresh Surface is opaque black; that is invisible
        # against a black background, but fill it with (0, 0, 0, 0) if the
        # background color ever changes -- confirm intent.
        s = int(self.size)
        self.img = pygame.Surface((4 * s, 4 * s)).convert_alpha()
        points = (s, 3 * s), (2 * s, 0), (3 * s, 3 * s)
        pygame.draw.lines(self.img, self.color, True, points)
    def draw(self, surf):
        rotimg = pygame.transform.rotate(self.img, -math.degrees(self.angle))
        surf.blit(rotimg, rotimg.get_rect(center = self.screenpos()))
# ENTITIES
@PositionVelocity()
class Entity:
    """Base for all game objects: positioned, moving, and removable."""
    alive = True
    def collide(self):
        # Default collision response is simply to die; components extend this.
        self.alive = False
@WrapScreen()
@SpawnsAtEdge(v0 = 50)
@ExplodesOnCollision()
@SplitsOnCollision(dvsplit = 40)
@Circular()
class Asteroid(Entity):
    """A drifting rock; splits into smaller rocks (down to level 1) when hit."""
    color = 144, 144, 144
    def __init__(self, level, pos = None, vel = None):
        self.level = level
        self.size = 10 + 10 * level
        if pos is None:
            # Stage startup: random point on the screen edge.
            self.spawn()
        else:
            # Fragment created by a larger asteroid splitting in two.
            self.x, self.y = pos
            self.vx, self.vy = vel
@WrapScreen()
@HasMaxSpeed(100)
@SpawnsAtCenter()
@RotatesWithArrows(turnspeed = 2.5)
@ThrustsWithUp(acceleration = 100)
@WeaponCooldown(0.5)
@FiresBullets(bulletspeed = 200)
@FiresForwardWithSpace()
@ExplodesOnCollision()
@Triangular()
class Player(Entity):
    """The player's ship: arrow keys steer and thrust, space fires."""
    color = 255, 255, 255
    size = 10
    def __init__(self):
        self.spawn()
        self.makeimg()  # pre-render the triangle sprite once
@CrossesScreen(v0 = 30, steerperiod = 3)
@WeaponCooldown(0.5)
@FiresBullets(bulletspeed = 200)
@FiresRandomDirectionsConstantly()
@ExplodesOnCollision()
@Rectangular()
class Ufo(Entity):
    """Green rectangular saucer that fires in random directions."""
    color = 127, 255, 127
    size = 15
    def __init__(self):
        # spawn() is supplied by an attached component.
        self.spawn()
@WrapScreen()
@Circular()
@Lifetime(1.5)
class Bullet(Entity):
    """Small red projectile; the Lifetime component expires it after 1.5s."""
    color = 255, 127, 127
    size = 2
    def __init__(self, pos, vel):
        # Unpack starting position and velocity in one statement.
        (self.x, self.y), (self.vx, self.vy) = pos, vel
@Lifetime(0.5)
@Grows(growthrate = 80)
@Circular()
class Explosion(Entity):
    """Expanding blue circle; removed by Lifetime after half a second."""
    color = 0, 0, 127
    def __init__(self, pos):
        self.x, self.y = pos
class Gamestate:
    """Overall game state: entities, stage progression, deaths, and HUD."""
    def __init__(self):
        self.stage = 0   # Current stage number; controls asteroid count and UFO rate
        self.deaths = 0  # Cumulative player deaths across stages
        self.hudfont = pygame.font.Font(None, 30)
        self.restart()
    def restart(self):
        """(Re)populate the play field for the current stage."""
        self.player = Player()
        # Entities that interact with each other
        self.objects = [self.player] + [Asteroid(3) for _ in range(self.stage + 2)]
        # Non-interacting entities, i.e. graphical effects (explosions)
        self.effects = []
        self.ufospawntime = 0
        # UFOs spawn faster (shorter period) on later stages.
        self.ufospawnperiod = 30.0 * 0.9 ** self.stage
        self.restarting = False
        self.restarttime = 0
    def control(self, keys):
        """Forward keyboard state to the player while it is alive."""
        if self.player.alive:
            self.player.control(keys)
    def think(self, dt):
        """Advance the simulation by dt seconds: spawn, move, collide, cull."""
        self.ufospawntime += dt
        if not self.restarting and self.ufospawntime > self.ufospawnperiod:
            self.ufospawntime = 0
            self.objects.append(Ufo())
        for entity in self.objects + self.effects:
            entity.think(dt)
        # The collision logic is: any two objects will collide if they're of different types. So
        # bullets don't collide with other bullets, asteroids don't collide with other asteroids.
        for i in range(len(self.objects)):
            for j in range(i):
                obj0, obj1 = self.objects[i], self.objects[j]
                if type(obj0) is type(obj1):
                    continue
                # Treat all objects as circular for the purpose of collision detection.
                # Miss the case where the objects are near opposite edges, close enough to collide
                # if you count screen wrap. It's not very noticeable so don't worry about it.
                if math.dist((obj0.x, obj0.y), (obj1.x, obj1.y)) < obj0.size + obj1.size:
                    obj0.collide()
                    obj1.collide()
        # Cull everything killed this frame.
        self.objects = [obj for obj in self.objects if obj.alive]
        self.effects = [effect for effect in self.effects if effect.alive]
        if self.restarting:
            # Two-second pause before the field repopulates.
            self.restarttime += dt
            if self.restarttime > 2:
                self.restart()
        elif not any(isinstance(obj, Asteroid) for obj in self.objects): # Win condition
            self.stage += 1
            self.restarting = True
        elif not self.player.alive: # Lose condition
            self.deaths += 1
            self.restarting = True
    def draw(self, surf):
        """Clear the screen, draw all entities and the HUD."""
        surf.fill((0, 0, 0))
        for entity in self.objects + self.effects:
            entity.draw(surf)
        hudcolor = 255, 255, 0
        surf.blit(self.hudfont.render("Stage: %d" % self.stage, True, hudcolor), (5, 5))
        surf.blit(self.hudfont.render("Deaths: %d" % self.deaths, True, hudcolor), (5, 30))
# Startup: create the window, the game state and the frame clock.
pygame.font.init()
screen = pygame.display.set_mode((screensize, screensize))
pygame.display.set_caption("enco Asteroids")
state = Gamestate()
clock = pygame.time.Clock()
def isquit(event):
    """Return True if the event should end the game (window close or Escape)."""
    return event.type == QUIT or (event.type == KEYDOWN and event.key == K_ESCAPE)
# Main loop, capped at 60fps; dt is the previous frame's duration in seconds.
while not any(isquit(event) for event in pygame.event.get()):
    dt = clock.tick(60) * 0.001
    state.control(pygame.key.get_pressed())
    state.think(dt)
    state.draw(screen)
    pygame.display.flip()
| {
"repo_name": "cosmologicon/enco",
"path": "example-asteroids.py",
"copies": "1",
"size": "11967",
"license": "unlicense",
"hash": -210189664801027260,
"line_mean": 30.0831168831,
"line_max": 95,
"alpha_frac": 0.6984206568,
"autogenerated": false,
"ratio": 2.8753003363767418,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.40737209931767415,
"avg_score": null,
"num_lines": null
} |
# "ASTEROIDS"
# Nikolay Grishchenko, 2014
# web@grischenko.ru
#
# PLEASE BE AWARE THAT THIS CODE IS ADOPTED ONLY
# FOR CODESKULPTOR IN COURSERA CLASS.
# IT DOES NOT COMPILE AS IS!
#
import simplegui
import math
from random import random, randrange, choice
# Game settings. Constants.
LIVES = 3             # Default number of player lives
WIDTH = 800           # Screen width in pixels
HEIGHT = 600          # Screen height in pixels
THRUST = 0.1          # Acceleration applied while thrust is ON
TURN_SPEED = 0.1      # Angular velocity modifier
FRICTION = 0.98       # Pseudo-friction modifier (also acts as the speed limit)
POINTS_FOR_ROCK = 10  # Score awarded per destroyed asteroid
EXPLOSION_FRAMES = 24 # Explosion animation frame count
ROCK_LIFETIME = 0     # Asteroid self-destruct timeout; 0 disables it
ROCK_LIMIT = 10       # Maximum simultaneous asteroids in space
ROCK_SPEEDUP = [20.0, 50.0] # Smaller values make rocks speed up sooner [x, y]
MIN_DISTANCE = 10     # Minimum spawn distance of new rocks from the ship
MISSILE_LIFETIME = 1  # Missile self-destruct timeout
MISSILE_THRUST = 3    # Initial speed of the missile
MISSILE_SPEEDUP = 100.0 # Smaller values make missiles speed up faster with score
# Global variables
""" You can use arrows or wa[s]d to control the ship. Space
to fire. 1st element is type of vector, 2nd - direction """
# Mapping key name -> [kind, direction]; kind 0 = thrust, 1 = turn, 2 = fire
# (interpreted by Ship.update_vel and the key handlers below).
inputs = { 'w': [0, 1],
           'a': [1, -1],
           'd': [1, 1],
           'up': [0, 1],
           'left': [1, -1],
           'right': [1, 1],
           'space': [2, 1]
           }
best = 0 # Best (highest) score achieved this session
class ImageInfo:
    """Describes one image asset: center, size, collision radius,
    lifespan (None/0 means immortal) and whether it is animated."""
    def __init__(self, center, size, radius = 0, lifespan = None, animated = False):
        self.center = center
        self.size = size
        self.radius = radius
        # A falsy lifespan means the image never expires.
        self.lifespan = lifespan if lifespan else float('inf')
        self.animated = animated
    def get_center(self):
        return self.center
    def get_size(self):
        return self.size
    def get_radius(self):
        return self.radius
    def get_lifespan(self):
        return self.lifespan
    def get_animated(self):
        return self.animated
# art assets created by Kim Lathrop, may be freely re-used in non-commercial projects, please credit Kim
# debris images - debris1_brown.png, debris2_brown.png, debris3_brown.png, debris4_brown.png
#                 debris1_blue.png, debris2_blue.png, debris3_blue.png, debris4_blue.png, debris_blend.png
debris_info = ImageInfo([320, 240], [640, 480])
debris_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/debris2_blue.png")
# nebula images - nebula_brown.png, nebula_blue.png
nebula_info = ImageInfo([400, 300], [800, 600])
nebula_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/nebula_blue.f2014.png")
# splash image
splash_info = ImageInfo([200, 150], [400, 300])
splash_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/splash.png")
# ship image (double-width sheet: unthrusted frame then thrusted frame)
ship_info = ImageInfo([45, 45], [90, 90], 35)
ship_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/double_ship.png")
# missile image - shot1.png, shot2.png, shot3.png
missile_info = ImageInfo([5,5], [10, 10], 3, MISSILE_LIFETIME)
missile_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/shot2.png")
# asteroid images - asteroid_blue.png, asteroid_brown.png, asteroid_blend.png
# Asteroids only get a finite lifespan when ROCK_LIFETIME is nonzero.
if ROCK_LIFETIME:
    asteroid_info = ImageInfo([45, 45], [90, 90], 40, ROCK_LIFETIME)
else:
    asteroid_info = ImageInfo([45, 45], [90, 90], 40)
asteroid_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/asteroid_blue.png")
# animated explosion - explosion_orange.png, explosion_blue.png, explosion_blue2.png, explosion_alpha.png
explosion_info = ImageInfo([64, 64], [128, 128], 17, 0.4, True)
explosion_image = simplegui.load_image("http://commondatastorage.googleapis.com/codeskulptor-assets/lathrop/explosion_alpha.png")
# sound assets purchased from sounddogs.com, please do not redistribute
soundtrack = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/soundtrack.mp3")
missile_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/missile.mp3")
missile_sound.set_volume(.5)
ship_thrust_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/thrust.mp3")
explosion_sound = simplegui.load_sound("http://commondatastorage.googleapis.com/codeskulptor-assets/sounddogs/explosion.mp3")
####################
# Helper functions #
def angle_to_vector(ang):
    """Return the unit direction vector [x, y] for angle *ang* in radians."""
    x, y = math.cos(ang), math.sin(ang)
    return [x, y]
def dist(p,q):
    """Euclidean distance between 2D points *p* and *q*."""
    dx = p[0] - q[0]
    dy = p[1] - q[1]
    return math.sqrt(dx ** 2 + dy ** 2)
###########
# Classes #
class Ship:
    """ This class should really be a child of Sprite because there
    are many same actions. But I am lazy to make refactoring now. """
    def __init__(self, pos, vel, angle, image, info):
        # Copy pos/vel so the caller's lists are not aliased.
        self.pos = [pos[0],pos[1]]
        self.vel = [vel[0],vel[1]]
        self.thrust = False
        self.angle = angle
        self.angle_vel = 0
        self.image = image
        self.image_center = info.get_center()
        self.image_size = info.get_size()
        self.radius = info.get_radius()
    def draw(self,canvas):
        """ Draw image and play sound with respect to self.thrust value """
        if self.thrust:
            # Use the second frame of the double-width ship sheet while thrusting.
            # NOTE(review): the y component below reads image_center[0], not [1];
            # harmless here because the center is square ([45, 45]) — confirm.
            canvas.draw_image(self.image,
                              [self.image_center[0] + self.image_size[0], self.image_center[0]],
                              self.image_size, self.pos, self.image_size, self.angle)
            ship_thrust_sound.play()
        else:
            canvas.draw_image(self.image, self.image_center, self.image_size,
                              self.pos, self.image_size, self.angle)
            ship_thrust_sound.rewind()
    def update(self):
        """ Advance the ship by one tick: rotate, move, wrap, accelerate. """
        self.angle += self.angle_vel # Update angle
        for p in range(2): # Update position
            self.pos[p] += self.vel[p]
        # This update is a bit complex so that the ship wraps
        # only when it is completely out of screen (not center)
        self.pos[0] = ((self.pos[0] + self.radius) % (WIDTH + self.radius * 2)) - self.radius
        self.pos[1] = ((self.pos[1] + self.radius) % (HEIGHT + self.radius * 2)) - self.radius
        if self.thrust: # Update velocity
            self.vel = [self.vel[i] + (angle_to_vector(self.angle)[i] * THRUST)\
                        for i in range(len(self.vel))]
        # Make some friction also used for speed limit.
        self.vel = [self.vel[i] * FRICTION for i in range(len(self.vel))]
    def update_vel(self, v):
        """ This updates thrust and angular velocity.
        'inputs' global dictionary helps understand this function """
        if v[0] == 0:
            # Kind 0 = thrust: on for direction 1, off otherwise.
            self.thrust = True if v[1] == 1 else False
        elif v[0] == 1:
            # Kind 1 = turn: direction is -1 (left) or +1 (right).
            self.angle_vel = v[1] * TURN_SPEED
    def get_params(self):
        """ Returns all Ship properties in a dictionary. """
        # 'cannon' is the muzzle point on the ship's nose, used to launch missiles.
        return({'pos': self.pos,
                'vel': self.vel,
                'angle': self.angle,
                'radius': self.radius,
                'cannon': [self.pos[0]+math.cos(self.angle)*self.radius,
                           self.pos[1]+math.sin(self.angle)*self.radius]
                })
class Sprite:
    """ Main class for 'space' objects. """
    def __init__(self, pos, vel, ang, ang_vel, image, info, sound = None):
        # Copy pos/vel so the caller's lists are not aliased.
        self.pos = [pos[0],pos[1]]
        self.vel = [vel[0],vel[1]]
        self.angle = ang
        self.angle_vel = ang_vel
        self.image = image
        self.image_center = info.get_center()
        self.image_size = info.get_size()
        self.radius = info.get_radius()
        self.lifespan = info.get_lifespan()
        self.animated = info.get_animated()
        self.age = 0  # Seconds alive; compared against lifespan each tick
        if sound:
            sound.rewind()
            sound.play()
    def get_obj_type(self):
        # NOTE(review): .obj_type is never assigned anywhere in this class;
        # presumably callers set it externally — confirm before relying on it.
        return self.obj_type
    def draw(self, canvas):
        canvas.draw_image(self.image, self.image_center, self.image_size,\
                          self.pos, self.image_size, self.angle)
    def update(self):
        """ Advance one tick: rotate, move, wrap, age, self-destruct, animate. """
        self.angle += self.angle_vel # Update angle
        for p in range(2): # Update position
            self.pos[p] += self.vel[p]
        # Warps object when it goes of the screen
        self.pos[0] = ((self.pos[0] + self.radius) % (WIDTH + self.radius * 2)) - self.radius
        self.pos[1] = ((self.pos[1] + self.radius) % (HEIGHT + self.radius * 2)) - self.radius
        self.age += 0.01666667 # Approximately 1/60 of a second or 1 Tick
        # Self destruct object if too old. Removes itself from whichever
        # global group (missiles/rocks/explosions) it belongs to.
        if self.age >= self.lifespan:
            if self in missiles:
                missiles.remove(self)
            elif self in rocks:
                rocks.remove(self)
            elif self in explosions:
                explosions.remove(self)
        # Animate
        # NOTE(review): this frame formula divides the current center by the
        # frame index; tiled animation normally uses center_x + size_x * frame.
        # Looks suspect — confirm against the explosion sprite sheet.
        if self.animated:
            ii = ((self.age % EXPLOSION_FRAMES) // 1) + 1
            self.image_center = [self.image_center[0] / ii + self.image_size[0] * ii,
                                 self.image_center[1]]
    def collide(self, obj):
        """ Returns True if obj collides with self """
        return dist(self.pos, obj.get_params()['pos']) < (self.radius + obj.get_params()['radius'])
    def get_params(self):
        """ Returns main Sprite properties in a dictionary. """
        return({'pos': self.pos,
                'vel': self.vel,
                'angle': self.angle,
                'radius': self.radius
                })
    def set_lifespan(self, t):
        """ Sets the new self.lifespan and resets age """
        self.lifespan = t
        self.age = 0
######################
# Gameplay functions #
def game_reset():
    """ Resets global variables (game over) """
    # Returns the game to the splash screen state with a fresh ship and
    # empty object groups; the soundtrack restarts from the beginning.
    global running, score, lives, time, missiles, rocks, explosions, my_ship
    running = False # Game running flag
    score = 0 # Initial score
    lives = LIVES # Initial player "lives".
    time = 0.5 # Some Joe's magic for background animation
    missiles = set([]) # Collection of missile objects
    rocks = set([]) # Collection of rock objects
    explosions = set([])# Collection of explosion objects
    my_ship = Ship([WIDTH / 2, HEIGHT / 2], [0, 0], 0, ship_image, ship_info)
    soundtrack.rewind()
    soundtrack.play()
def group_update(group, canvas):
    """ Update status and draw elements of given group """
    for sprite in group:
        sprite.update()
        sprite.draw(canvas)
def group_collide(group, obj):
    """Explode every element of *group* that collides with *obj*.

    Returns the number of collisions. 0 is falsy, so callers may treat the
    result as a boolean as well as a count. (The original initialized the
    accumulator to False and relied on bool+int arithmetic; an int literal
    is clearer and behaves identically. The second, dead bare-string
    "docstring" has been merged into this one.)
    """
    hits = 0
    for member in group:
        if member.collide(obj):
            explode(member)         # Draw explosion at the victim's position
            member.set_lifespan(0)  # Initiate object self destruction
            hits += 1
    return hits
def explode(obj):
    """ Draws explosion at position of given object """
    # Adds an animated explosion sprite (stationary) to the global
    # 'explosions' group and plays the explosion sound from the start.
    params = obj.get_params()
    explosions.add(Sprite([params['pos'][0], params['pos'][1]], [0, 0],
                          0, 0, explosion_image, explosion_info))
    explosion_sound.rewind()
    explosion_sound.play()
def group_group_collide(group, group2):
    """ Checks collisions between groups. Elements of group2
    explode on collision; members of group that hit something are
    removed, and their count is returned. """
    casualties = set()
    for member in group:
        if group_collide(group2, member):
            casualties.add(member)
    group.difference_update(casualties)
    return len(casualties)
def fire_missile():
    """ The missile fires with constant speed in the
    direction of the cannon + current ship velocity is
    added to this vector. This looks realistic. """
    lp = my_ship.get_params() # lp stands for "launch point"
    # Missile self speed increases with score to compensate rocks
    m_speed = MISSILE_THRUST
    m_speed += MISSILE_THRUST * (score / MISSILE_SPEEDUP) if score > 0 else 0
    # Spawn at the cannon tip; the Sprite constructor plays the launch sound.
    missiles.add(Sprite([lp['cannon'][0], lp['cannon'][1]],
                        [lp['vel'][0] + math.cos(lp['angle']) * m_speed,
                         lp['vel'][1] + math.sin(lp['angle']) * m_speed],
                        0, 0, missile_image, missile_info,
                        missile_sound) )
def rock_spawner():
    """Timer handler: spawn a new rock at a random safe position.

    Positions too close to the ship are re-rolled immediately. The original
    implementation re-rolled via unbounded recursion (despite its comment
    claiming the attempt was skipped); a loop is behaviorally equivalent —
    it draws random positions in the same order — without the stack-depth
    risk.
    """
    safe_radius = MIN_DISTANCE + my_ship.get_params()['radius'] + asteroid_info.get_radius()
    new_pos = [randrange(0, WIDTH), randrange(0, HEIGHT)]
    # Re-roll until the spawn point is a safe distance from the ship.
    while dist(new_pos, my_ship.get_params()['pos']) < safe_radius:
        new_pos = [randrange(0, WIDTH), randrange(0, HEIGHT)]
    # We increase the speed of new rocks as the score grows.
    # Horizontal speed is increased faster than vertical.
    new_vel = [random() * choice([-1, 1]) * (score / ROCK_SPEEDUP[0]),
               random() * choice([-1, 1]) * (score / ROCK_SPEEDUP[1])]
    # Spawn new rock only while the game is running and below the rock cap.
    if running and len(rocks) < ROCK_LIMIT:
        rocks.add(Sprite(new_pos, new_vel,
                         randrange(0, 6), # ~ 0-360 degrees
                         random() * choice([-1, 1]) / 20,
                         asteroid_image, asteroid_info))
# Call updates and draw elements
def draw(canvas):
    """ Per-frame handler: animate background, draw HUD, update and draw
    all entities, resolve collisions, and show the splash when idle. """
    global time, lives, score, best
    # Animate background //by Joe and his guys
    time += 1
    wtime = (time / 4) % WIDTH
    center = debris_info.get_center()
    size = debris_info.get_size()
    canvas.draw_image(nebula_image, nebula_info.get_center(), nebula_info.get_size(), [WIDTH / 2, HEIGHT / 2], [WIDTH, HEIGHT])
    # Two debris layers offset by one screen width give a seamless scroll.
    canvas.draw_image(debris_image, center, size, (wtime - WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
    canvas.draw_image(debris_image, center, size, (wtime + WIDTH / 2, HEIGHT / 2), (WIDTH, HEIGHT))
    # Draw score and lifes
    canvas.draw_text('Best Score: ' + str(best), [50, 30], 38, 'White')
    canvas.draw_text('Score: ' + str(score), [WIDTH - 170, 30], 38, 'Lime')
    canvas.draw_text('Lives: ' + str(lives), [WIDTH - 170, 70], 38, 'Lime')
    # Update ship and sprites
    my_ship.update()
    my_ship.draw(canvas)
    group_update(rocks, canvas)
    group_update(missiles, canvas)
    group_update(explosions, canvas)
    # Check collisions and update score/lives respectively
    if group_collide(rocks, my_ship):
        lives -= 1
    score += group_group_collide(rocks, missiles) * POINTS_FOR_ROCK
    best = score if score > best else best
    if lives <= 0:
        game_reset()
    # Draw splash image when not running game
    if not running:
        canvas.draw_image(splash_image, splash_info.get_center(),
                          splash_info.get_size(), [WIDTH / 2, HEIGHT / 2],
                          splash_info.get_size())
# Interface Event Handlers
def click(pos):
    """Start the game when the splash screen area is clicked."""
    global running
    cx, cy = WIDTH / 2, HEIGHT / 2
    w, h = splash_info.get_size()
    # Strict bounds check against the splash image rectangle.
    inwidth = abs(pos[0] - cx) < w / 2
    inheight = abs(pos[1] - cy) < h / 2
    if inwidth and inheight and not running:
        running = True
def key_down(k):
    """Key press handler: fire on space, otherwise steer/thrust the ship."""
    if not running:
        return
    if k == simplegui.KEY_MAP['space']:
        fire_missile()
        return
    # Match the key against the control table and apply its vector.
    for name in inputs:
        if k == simplegui.KEY_MAP[name]:
            my_ship.update_vel(inputs[name])
def key_up(k):
    """Key release handler: zero the direction of the released control."""
    if not running:
        return
    for name in inputs:
        if k == simplegui.KEY_MAP[name]:
            # Keep the control kind, drop the direction to 0 (stop).
            my_ship.update_vel([inputs[name][0], 0])
##########################
# Make the puppets dance #
# Wire up the simplegui frame, the spawn timer and the input handlers,
# then reset state to the splash screen and start everything.
frame = simplegui.create_frame("Asteroids", WIDTH, HEIGHT)
frame.set_draw_handler(draw)
timer = simplegui.create_timer(500.0, rock_spawner)  # Spawn attempt every 500ms
frame.set_keydown_handler(key_down)
frame.set_keyup_handler(key_up)
frame.set_mouseclick_handler(click)
game_reset()
timer.start()
frame.start()
| {
"repo_name": "ngr/sandbox",
"path": "python/asteroids.py",
"copies": "1",
"size": "15978",
"license": "mit",
"hash": 9059257902395820000,
"line_mean": 37.6876513317,
"line_max": 129,
"alpha_frac": 0.6085242208,
"autogenerated": false,
"ratio": 3.239002635313197,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9289225801342548,
"avg_score": 0.01166021095412983,
"num_lines": 413
} |
# astests_pyb.py
# Tests for AS_GPS module. Emulates a GPS unit using a UART loopback.
# Run on a Pyboard with X1 and X2 linked
# Tests for AS_GPS module (asynchronous GPS device driver)
# Based on tests for MicropyGPS by Michael Calvin McCoy
# https://github.com/inmcm/micropyGPS
# Copyright (c) 2018-2020 Peter Hinch
# Released under the MIT License (MIT) - see LICENSE file
# Ported to uasyncio V3 OK.
from .as_GPS import *
from machine import UART
import uasyncio as asyncio
def callback(gps, _, arg):
    """Fix callback: report the GPS time and the user-supplied argument."""
    print('Fix callback. Time:', gps.utc, arg)
async def run_tests():
    """Exercise the AS_GPS parser over a UART loopback.

    Writes canned NMEA sentences (RMC, GLL, VTG, GGA, GSA, GSV) out of the
    UART and lets the driver parse them back in, printing the decoded
    fields after each group. Requires X1 and X2 physically linked.
    """
    uart = UART(4, 9600, read_buf_len=200)
    swriter = asyncio.StreamWriter(uart, {})
    sreader = asyncio.StreamReader(uart)
    sentence_count = 0
    test_RMC = ['$GPRMC,180041.896,A,3749.1851,N,08338.7891,W,001.9,154.9,240911,,,A*7A\n',
                '$GPRMC,180049.896,A,3749.1808,N,08338.7869,W,001.8,156.3,240911,,,A*70\n',
                '$GPRMC,092751.000,A,5321.6802,N,00630.3371,W,0.06,31.66,280511,,,A*45\n']
    test_VTG = ['$GPVTG,232.9,T,,M,002.3,N,004.3,K,A*01\n']
    test_GGA = ['$GPGGA,180050.896,3749.1802,N,08338.7865,W,1,07,1.1,397.4,M,-32.5,M,,0000*6C\n']
    test_GSA = ['$GPGSA,A,3,07,11,28,24,26,08,17,,,,,,2.0,1.1,1.7*37\n',
                '$GPGSA,A,3,07,02,26,27,09,04,15,,,,,,1.8,1.0,1.5*33\n']
    test_GSV = ['$GPGSV,3,1,12,28,72,355,39,01,52,063,33,17,51,272,44,08,46,184,38*74\n',
                '$GPGSV,3,2,12,24,42,058,33,11,34,053,33,07,20,171,40,20,15,116,*71\n',
                '$GPGSV,3,3,12,04,12,204,34,27,11,324,35,32,11,089,,26,10,264,40*7B\n',
                '$GPGSV,3,1,11,03,03,111,00,04,15,270,00,06,01,010,00,13,06,292,00*74\n',
                '$GPGSV,3,2,11,14,25,170,00,16,57,208,39,18,67,296,40,19,40,246,00*74\n',
                '$GPGSV,3,3,11,22,42,067,42,24,14,311,43,27,05,244,00,,,,*4D\n',
                '$GPGSV,4,1,14,22,81,349,25,14,64,296,22,18,54,114,21,51,40,212,*7D\n',
                '$GPGSV,4,2,14,24,30,047,22,04,22,312,26,31,22,204,,12,19,088,23*72\n',
                '$GPGSV,4,3,14,25,17,127,18,21,16,175,,11,09,315,16,19,05,273,*72\n',
                '$GPGSV,4,4,14,32,05,303,,15,02,073,*7A\n']
    test_GLL = ['$GPGLL,3711.0942,N,08671.4472,W,000812.000,A,A*46\n',
                '$GPGLL,4916.45,N,12311.12,W,225444,A,*1D\n',
                '$GPGLL,4250.5589,S,14718.5084,E,092204.999,A*2D\n',
                '$GPGLL,4250.5589,S,14718.5084,E,092204.999,A*2D\n',]
    # '$GPGLL,0000.0000,N,00000.0000,E,235947.000,V*2D\n', # Will ignore this one
    my_gps = AS_GPS(sreader, fix_cb=callback, fix_cb_args=(42,))
    sentence = ''
    # RMC: position, speed, course and date.
    for sentence in test_RMC:
        sentence_count += 1
        await swriter.awrite(sentence)
        await my_gps.data_received(date=True)
        print('Longitude:', my_gps.longitude())
        print('Latitude', my_gps.latitude())
        print('UTC Time:', my_gps.utc)
        print('Speed:', my_gps.speed())
        print('Date Stamp:', my_gps.date)
        print('Course', my_gps.course)
        print('Data is Valid:', my_gps._valid)
        print('Compass Direction:', my_gps.compass_direction())
        print('')
    # GLL: position only.
    for sentence in test_GLL:
        sentence_count += 1
        await swriter.awrite(sentence)
        await my_gps.data_received(position=True)
        print('Longitude:', my_gps.longitude())
        print('Latitude', my_gps.latitude())
        print('UTC Time:', my_gps.utc)
        print('Data is Valid:', my_gps._valid)
        print('')
    # VTG: track and ground speed.
    for sentence in test_VTG:
        print('Test VTG', sentence)
        sentence_count += 1
        await swriter.awrite(sentence)
        await asyncio.sleep_ms(200)  # Can't wait for course because of position check
        print('Speed:', my_gps.speed())
        print('Course', my_gps.course)
        print('Compass Direction:', my_gps.compass_direction())
        print('')
    # GGA: fix data including altitude and HDOP.
    for sentence in test_GGA:
        sentence_count += 1
        await swriter.awrite(sentence)
        await my_gps.data_received(position=True)
        print('Longitude', my_gps.longitude())
        print('Latitude', my_gps.latitude())
        print('UTC Time:', my_gps.utc)
        # print('Fix Status:', my_gps.fix_stat)
        print('Altitude:', my_gps.altitude)
        print('Height Above Geoid:', my_gps.geoid_height)
        print('Horizontal Dilution of Precision:', my_gps.hdop)
        print('Satellites in Use by Receiver:', my_gps.satellites_in_use)
        print('')
    # GSA: active satellites and dilution of precision.
    for sentence in test_GSA:
        sentence_count += 1
        await swriter.awrite(sentence)
        await asyncio.sleep_ms(200)
        print('Satellites Used', my_gps.satellites_used)
        print('Horizontal Dilution of Precision:', my_gps.hdop)
        print('Vertical Dilution of Precision:', my_gps.vdop)
        print('Position Dilution of Precision:', my_gps.pdop)
        print('')
    # GSV: satellites in view, spread over multi-sentence groups.
    for sentence in test_GSV:
        sentence_count += 1
        await swriter.awrite(sentence)
        await asyncio.sleep_ms(200)
        print('SV Sentences Parsed', my_gps._last_sv_sentence)
        print('SV Sentences in Total', my_gps._total_sv_sentences)
        print('# of Satellites in View:', my_gps.satellites_in_view)
        # Data valid only once every sentence of the current group arrived.
        data_valid = my_gps._total_sv_sentences > 0 and my_gps._total_sv_sentences == my_gps._last_sv_sentence
        print('Is Satellite Data Valid?:', data_valid)
        if data_valid:
            print('Satellite Data:', my_gps._satellite_data)
            print('Satellites Visible:', list(my_gps._satellite_data.keys()))
        print('')
    print("Pretty Print Examples:")
    print('Latitude (degs):', my_gps.latitude_string(DD))
    print('Longitude (degs):', my_gps.longitude_string(DD))
    print('Latitude (dms):', my_gps.latitude_string(DMS))
    print('Longitude (dms):', my_gps.longitude_string(DMS))
    print('Latitude (kml):', my_gps.latitude_string(KML))
    print('Longitude (kml):', my_gps.longitude_string(KML))
    print('Latitude (degs, mins):', my_gps.latitude_string())
    print('Longitude (degs, mins):', my_gps.longitude_string())
    print('Speed:', my_gps.speed_string(KPH), 'or',
          my_gps.speed_string(MPH), 'or',
          my_gps.speed_string(KNOT))
    print('Date (Long Format):', my_gps.date_string(LONG))
    print('Date (Short D/M/Y Format):', my_gps.date_string(DMY))
    print('Date (Short M/D/Y Format):', my_gps.date_string(MDY))
    print('Time:', my_gps.time_string())
    print()
    print('### Final Results ###')
    print('Sentences Attempted:', sentence_count)
    print('Sentences Found:', my_gps.clean_sentences)
    print('Sentences Parsed:', my_gps.parsed_sentences)
    print('Unsupported sentences:', my_gps.unsupported_sentences)
    print('CRC_Fails:', my_gps.crc_fails)
# Entry point: drive the loopback test suite to completion.
asyncio.run(run_tests())
| {
"repo_name": "peterhinch/micropython-async",
"path": "v3/as_drivers/as_GPS/astests_pyb.py",
"copies": "1",
"size": "6747",
"license": "mit",
"hash": -2437674435312698000,
"line_mean": 43.6821192053,
"line_max": 110,
"alpha_frac": 0.60560249,
"autogenerated": false,
"ratio": 2.759509202453988,
"config_test": true,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3865111692453988,
"avg_score": null,
"num_lines": null
} |
import ast, copy
from ast_utils import *
class Inliner:
    """AST-level function inlining mixin.

    Expects the host visitor class to provide .visit(), ._global_functions
    and ._func_typedefs; emits generated source lines via self.writer.
    """
    def setup_inliner(self, writer):
        # writer: output stream for generated code lines.
        self.writer = writer
        self._with_inline = False
        self._inline = []             # Stack of pending return ids
        self._inline_ids = 0          # Counter for unique local-name suffixes
        self._inline_breakout = False
    def inline_helper_remap_names(self, remap):
        # Emit one 'var a,b,c' declaration covering all remapped locals.
        return "JS('var %s')" %','.join(remap.values())
    def inline_helper_return_id(self, return_id):
        # Declare the slot the inlined body assigns its return value to.
        return "JS('var __returns__%s = null')"%return_id
    def inline_function(self, node):
        """Expand a call to a known global function in place.

        Returns the name of the variable holding the inlined call's result.
        """
        name = self.visit(node.func)
        fnode = self._global_functions[ name ]
        fnode = copy.deepcopy( fnode )  # Never mutate the cached AST
        finfo = inspect_function( fnode )
        # Rename the callee's locals so they can't clash with caller names.
        remap = {}
        for n in finfo['name_nodes']:
            if n.id not in finfo['locals']: continue
            # Sanity check: .id should always be a plain string, not a node.
            if isinstance(n.id, ast.Name):
                raise RuntimeError
            if n.id not in remap:
                new_name = n.id + '_%s'%self._inline_ids
                remap[ n.id ] = new_name
                self._inline_ids += 1
            n.id = remap[ n.id ]
        if remap:
            self.writer.write( self.inline_helper_remap_names(remap) )
        # Carry any type annotations over under the remapped names.
        for n in remap:
            if n in finfo['typedefs']:
                self._func_typedefs[ remap[n] ] = finfo['typedefs'][n]
        # Bind arguments: positional values first, defaults for the rest.
        offset = len(fnode.args.args) - len(fnode.args.defaults)
        for i,ad in enumerate(fnode.args.args):
            if i < len(node.args):
                ac = self.visit( node.args[i] )
            else:
                assert fnode.args.defaults
                dindex = i - offset
                ac = self.visit( fnode.args.defaults[dindex] )
            ad = remap[ self.visit(ad) ]
            self.writer.write( "%s = %s" %(ad, ac) )
        return_id = name + str(self._inline_ids)
        self._inline.append( return_id )
        self.writer.write( self.inline_helper_return_id( return_id ))
        #if len( finfo['return_nodes'] ) > 1: ## TODO fix me
        if True:
            # Wrap the body in 'while True' so a return can 'break' out.
            self._inline_breakout = True
            self.writer.write('while True:')
            self.writer.push()
            for b in fnode.body:
                self.visit(b)
            # Bodies with no return would loop forever without this break.
            if not len( finfo['return_nodes'] ):
                self.writer.write('break')
            self.writer.pull()
            #self._inline_breakout = False
        else:
            for b in fnode.body:
                self.visit(b)
        # Stack discipline check: we must pop exactly what we pushed.
        if self._inline.pop() != return_id:
            raise RuntimeError
        for n in remap:
            gname = remap[n]
            for n in finfo['name_nodes']:
                if n.id == gname:
                    # NOTE(review): this assigns the AST node object itself to
                    # the (string) .id field — looks like a bug; possibly meant
                    # 'n.id = gname' or a restore of the original name. Confirm.
                    n.id = n
        return '__returns__%s' %return_id
| {
"repo_name": "chiyama/PythonJS",
"path": "pythonjs/inline_function.py",
"copies": "6",
"size": "2238",
"license": "bsd-3-clause",
"hash": -6951807192025397000,
"line_mean": 24.4318181818,
"line_max": 63,
"alpha_frac": 0.6327077748,
"autogenerated": false,
"ratio": 2.7392900856793148,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.04075124335489158,
"num_lines": 88
} |
# Copyright (c) 2018-2020 Peter Hinch
# Released under the MIT License (MIT) - see LICENSE file
# TODO Test machine version. Replace LED with callback. Update tests and doc.
import uasyncio as asyncio
import machine
# Pyboard detection: RTC calibration features need pyb.RTC, which only
# exists on STM (Pyboard) targets.
try:
    import pyb
    on_pyboard = True
    rtc = pyb.RTC()
except ImportError:
    on_pyboard = False
import utime
import micropython
import gc
from .as_GPS import RMC, AS_GPS
from .as_rwGPS import GPS
# Reserve buffer so exceptions raised in the PPS ISR can be reported.
micropython.alloc_emergency_exception_buf(100)
# Convenience function. Return RTC seconds since midnight as float
def rtc_secs():
    """Return RTC time of day as float seconds (STM targets only)."""
    if not on_pyboard:
        raise OSError('Only available on STM targets.')
    dt = rtc.datetime()
    hrs, mins, secs, subsecs = dt[4], dt[5], dt[6], dt[7]
    # The subseconds register counts down from 255 over one second.
    return 3600 * hrs + 60 * mins + secs + (255 - subsecs) / 256
# Constructor for GPS_Timer class
def gps_ro_t_init(self, sreader, pps_pin, local_offset=0,
                  fix_cb=lambda *_ : None, cb_mask=RMC, fix_cb_args=(),
                  pps_cb=lambda *_ : None, pps_cb_args=()):
    """Initialise the read-only base driver, then attach PPS handling."""
    AS_GPS.__init__(self, sreader, local_offset, fix_cb, cb_mask, fix_cb_args)
    self.setup(pps_pin, pps_cb, pps_cb_args)
# Constructor for GPS_RWTimer class
def gps_rw_t_init(self, sreader, swriter, pps_pin, local_offset=0,
                  fix_cb=lambda *_ : None, cb_mask=RMC, fix_cb_args=(),
                  msg_cb=lambda *_ : None, msg_cb_args=(),
                  pps_cb=lambda *_ : None, pps_cb_args=()):
    """Initialise the read/write base driver, then attach PPS handling."""
    GPS.__init__(self, sreader, swriter, local_offset, fix_cb, cb_mask, fix_cb_args,
                 msg_cb, msg_cb_args)
    self.setup(pps_pin, pps_cb, pps_cb_args)
class GPS_Tbase():
    """Mixin adding PPS-pin-disciplined precision timing to a GPS driver."""
    def setup(self, pps_pin, pps_cb, pps_cb_args):
        """Record the PPS pin and callback, init timing state, start the driver."""
        self._pps_pin = pps_pin
        self._pps_cb = pps_cb
        self._pps_cb_args = pps_cb_args
        self.msecs = None  # Integer time in ms since midnight at last PPS
        self.t_ms = 0  # ms since midnight
        self.acquired = None  # Value of ticks_us at edge of PPS
        self._rtc_set = False  # Set RTC flag
        self._rtcbuf = [0]*8  # Buffer for RTC setting
        self._time = [0]*4  # get_t_split() time buffer.
        asyncio.create_task(self._start())
    async def _start(self):
        """Wait for a valid date fix, then arm the PPS rising-edge interrupt."""
        await self.data_received(date=True)
        self._pps_pin.irq(self._isr, trigger = machine.Pin.IRQ_RISING)
    def close(self):
        """Disable the PPS interrupt."""
        self._pps_pin.irq(None)
    # If update rate > 1Hz, when PPS edge occurs the last RMC message will have
    # a nonzero ms value. Need to set RTC to 1 sec after the last 1 second boundary
    def _isr(self, _):
        """PPS rising-edge ISR: latch edge time, update ms-of-day, set RTC."""
        acquired = utime.ticks_us() # Save time of PPS
        # Time in last NMEA sentence was time of last PPS.
        # Reduce to integer secs since midnight local time.
        isecs = (self.epoch_time + int(3600*self.local_offset)) % 86400
        # ms since midnight (28 bits). Add in any ms in RMC data
        msecs = isecs * 1000 + self.msecs
        # This PPS is presumed to be one update later
        msecs += self._update_ms
        if msecs >= 86400000: # Next PPS will deal with rollover
            return
        if self.t_ms == msecs: # No RMC message has arrived: nothing to do
            return
        self.t_ms = msecs # Current time in ms past midnight
        self.acquired = acquired
        # Set RTC if required and if last RMC indicated a 1 second boundary
        if self._rtc_set:
            # Time as int(seconds) in last NMEA sentence. Earlier test ensures
            # no rollover when we add 1.
            self._rtcbuf[6] = (isecs + 1) % 60
            rtc.datetime(self._rtcbuf)
            self._rtc_set = False
        # Could be an outage here, so PPS arrives many secs after last sentence
        # Is this right? Does PPS continue during outage?
        self._pps_cb(self, *self._pps_cb_args)
# Called when base class updates the epoch_time.
# Need local time for setting Pyboard RTC in interrupt context
def _dtset(self, wday):
t = self.epoch_time + int(3600 * self.local_offset)
y, m, d, hrs, mins, secs, *_ = self._localtime(t)
self._rtcbuf[0] = y
self._rtcbuf[1] = m
self._rtcbuf[2] = d
self._rtcbuf[3] = wday
self._rtcbuf[4] = hrs
self._rtcbuf[5] = mins
self._rtcbuf[6] = secs
    # Subsecs register is read-only. So need to set RTC on PPS leading edge.
    # Set flag and let ISR set the RTC. Pause until done.
    async def set_rtc(self):
        """Request an RTC set on the next PPS edge and wait for completion."""
        if not on_pyboard:
            raise OSError('Only available on STM targets.')
        self._rtc_set = True
        # The ISR clears the flag once it has written the RTC.
        while self._rtc_set:
            await asyncio.sleep_ms(250)
    # Value of RTC time at current instant. This is a notional arbitrary
    # precision integer in μs since Y2K. Notional because RTC is set to
    # local time.
    def _get_rtc_usecs(self):
        """Return the RTC's current value as integer μs since Y2K (local time)."""
        y, m, d, weekday, hrs, mins, secs, subsecs = rtc.datetime()
        tim = 1000000 * utime.mktime((y, m, d, hrs, mins, secs, weekday - 1, 0))
        # Subseconds register counts down from 255 over one second.
        return tim + ((1000000 * (255 - subsecs)) >> 8)
    # Return no. of μs RTC leads GPS. Done by comparing times at the instant of
    # PPS leading edge.
    async def delta(self):
        """Return how many μs the RTC leads GPS time (STM targets only)."""
        if not on_pyboard:
            raise OSError('Only available on STM targets.')
        rtc_time, gps_time = await self._await_pps()  # μs since Y2K at time of latest PPS
        return rtc_time - gps_time
    # Pause until PPS interrupt occurs. Then wait for an RTC subsecond change.
    # Read the RTC time in μs since Y2K and adjust to give the time the RTC
    # (notionally) would have read at the PPS leading edge.
    async def _await_pps(self):
        """Return (rtc_usecs, gps_usecs) both referred to the latest PPS edge."""
        t0 = self.acquired
        while self.acquired == t0:  # Busy-wait on PPS interrupt: not time-critical
            await asyncio.sleep_ms(0)  # because acquisition time stored in ISR.
        gc.collect()  # Time-critical code follows
        st = rtc.datetime()[7]
        while rtc.datetime()[7] == st:  # Wait for RTC to change (4ms max)
            pass
        # Subtract the elapsed time since the PPS edge to refer the RTC
        # reading back to the edge itself.
        dt = utime.ticks_diff(utime.ticks_us(), self.acquired)
        trtc = self._get_rtc_usecs() - dt  # Read RTC now and adjust for PPS edge
        tgps = 1000000 * (self.epoch_time + 3600*self.local_offset + 1)
        return trtc, tgps
# Non-realtime calculation of calibration factor. times are in μs
def _calculate(self, gps_start, gps_end, rtc_start, rtc_end):
# Duration (μs) between PPS edges
pps_delta = (gps_end - gps_start)
# Duration (μs) between PPS edges as measured by RTC and corrected
rtc_delta = (rtc_end - rtc_start)
ppm = (1000000 * (rtc_delta - pps_delta)) / pps_delta # parts per million
return int(-ppm/0.954)
# Measure difference between RTC and GPS rate and return calibration factor
# If 3 successive identical results are within 1 digit the outcome is considered
# valid and the coro quits.
    async def _getcal(self, minutes=5):
        """Measure the RTC rate against GPS and return a calibration factor.

        Samples every 10s for up to `minutes` minutes. Returns early with the
        rounded mean once the last 3 results agree within 1 digit (after at
        least 4 samples); otherwise returns the final sample.
        """
        if minutes < 1:
            raise ValueError('minutes must be >= 1')
        results = [0, 0, 0] # Last 3 cal results
        idx = 0 # Index into above circular buffer
        nresults = 0 # Count of results
        rtc.calibration(0) # Clear existing RTC calibration
        await self.set_rtc()
        # Wait for PPS, then RTC 1/256 second change. Return the time the RTC
        # would have measured at instant of PPS (notional μs since Y2K). Also
        # GPS time at the same instant.
        rtc_start, gps_start = await self._await_pps()
        for n in range(minutes):
            for _ in range(6): # Try every 10s
                await asyncio.sleep(10)
                # Get RTC time at instant of PPS
                rtc_end, gps_end = await self._await_pps()
                cal = self._calculate(gps_start, gps_end, rtc_start, rtc_end)
                print('Mins {:d} cal factor {:d}'.format(n + 1, cal))
                results[idx] = cal
                idx += 1
                idx %= len(results)
                nresults += 1
                # NOTE(review): >= 4 means the first sample is never part of a
                # successful convergence test, although the header comment says
                # "3 successive identical results" - confirm this is intended.
                if nresults >= 4 and (abs(max(results) - min(results)) <= 1):
                    return round(sum(results)/len(results))
        return cal
# Pause until time/date message received and 1st PPS interrupt has occurred.
    async def ready(self):
        """Pause until a time/date message and the first PPS interrupt have
        occurred (.acquired is stored by the PPS ISR)."""
        while self.acquired is None:
            await asyncio.sleep(1)
async def calibrate(self, minutes=5):
if not on_pyboard:
raise OSError('Only available on STM targets.')
print('Waiting for GPS startup.')
await self.ready()
print('Waiting up to {} minutes to acquire calibration factor...'.format(minutes))
cal = await self._getcal(minutes)
if cal <= 512 and cal >= -511:
rtc.calibration(cal)
print('Pyboard RTC is calibrated. Factor is {:d}.'.format(cal))
else:
print('Calibration factor {:d} is out of range.'.format(cal))
# User interface functions: accurate GPS time.
# Return GPS time in ms since midnight (small int on 32 bit h/w).
# No allocation.
    def get_ms(self):
        """Return GPS time in ms since midnight (a small int on 32-bit h/w).

        .t_ms and .acquired are read with IRQs disabled so the PPS ISR
        cannot update them mid-read; the result is then extrapolated by the
        μs elapsed since the PPS edge. Allocation-free.
        """
        state = machine.disable_irq()
        t = self.t_ms
        acquired = self.acquired
        machine.enable_irq(state)
        return t + utime.ticks_diff(utime.ticks_us(), acquired) // 1000
# Return accurate GPS time of day (hrs: int, mins: int, secs: int, μs: int)
# The ISR can skip an update of .secs if a day rollover would occur. Next
# RMC handles this, so if updates are at 1s intervals the subsequent ISR
# will see hms = 0, 0, 1 and a value of .acquired > 1000000.
# Even at the slowest update rate of 10s this can't overflow into minutes.
    def get_t_split(self):
        """Return accurate GPS time of day as [hrs, mins, secs, μs].

        The returned list is a reused buffer (self._time): copy it if the
        values must survive the next call. secs may exceed 59 when more than
        a second has elapsed since the PPS edge (see comment above).
        """
        state = machine.disable_irq()
        t = self.t_ms
        acquired = self.acquired
        machine.enable_irq(state)
        isecs, ims = divmod(t, 1000) # Get integer secs and ms
        x, secs = divmod(isecs, 60)
        hrs, mins = divmod(x, 60)
        dt = utime.ticks_diff(utime.ticks_us(), acquired) # μs to time now
        ds, us = divmod(dt, 1000000)
        # If dt > 1e6 can add to secs without risk of rollover: see above.
        self._time[0] = hrs
        self._time[1] = mins
        self._time[2] = secs + ds
        self._time[3] = us + ims*1000
        return self._time
# Concrete classes: combine the timing mixin with the read-only / read-write
# GPS drivers. NOTE(review): this variant references bare AS_GPS and GPS -
# presumably imported by name earlier in this file; verify against its imports.
GPS_Timer = type('GPS_Timer', (GPS_Tbase, AS_GPS), {'__init__': gps_ro_t_init})
GPS_RWTimer = type('GPS_RWTimer', (GPS_Tbase, GPS), {'__init__': gps_rw_t_init})
| {
"repo_name": "peterhinch/micropython-async",
"path": "v3/as_drivers/as_GPS/as_tGPS.py",
"copies": "1",
"size": "10508",
"license": "mit",
"hash": -3436935152533579000,
"line_mean": 42.7416666667,
"line_max": 90,
"alpha_frac": 0.6064964755,
"autogenerated": false,
"ratio": 3.377734877734878,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4484231353234878,
"avg_score": null,
"num_lines": null
} |
# Copyright (c) 2018 Peter Hinch
# Released under the MIT License (MIT) - see LICENSE file
# TODO Test machine version. Replace LED with callback. Update tests and doc.
import uasyncio as asyncio
import machine
try:
import pyb
on_pyboard = True
rtc = pyb.RTC()
except ImportError:
on_pyboard = False
import utime
import micropython
import gc
import as_GPS
import as_rwGPS
micropython.alloc_emergency_exception_buf(100)
# Convenience function. Return RTC seconds since midnight as float
def rtc_secs():
    """Convenience function: return RTC seconds since midnight as a float."""
    if not on_pyboard:
        raise OSError('Only available on STM targets.')
    _, _, _, _, hrs, mins, secs, subsecs = rtc.datetime()
    # The subseconds register counts down from 255 over each second.
    return 3600 * hrs + 60 * mins + secs + (255 - subsecs) / 256
# Constructor for GPS_Timer class
def gps_ro_t_init(self, sreader, pps_pin, local_offset=0,
                  fix_cb=lambda *_ : None, cb_mask=as_GPS.RMC, fix_cb_args=(),
                  pps_cb=lambda *_ : None, pps_cb_args=()):
    """__init__ for GPS_Timer: initialise the read-only GPS driver, then
    attach the PPS pin and callback via GPS_Tbase.setup()."""
    as_GPS.AS_GPS.__init__(self, sreader, local_offset, fix_cb, cb_mask, fix_cb_args)
    self.setup(pps_pin, pps_cb, pps_cb_args)
# Constructor for GPS_RWTimer class
def gps_rw_t_init(self, sreader, swriter, pps_pin, local_offset=0,
                  fix_cb=lambda *_ : None, cb_mask=as_GPS.RMC, fix_cb_args=(),
                  msg_cb=lambda *_ : None, msg_cb_args=(),
                  pps_cb=lambda *_ : None, pps_cb_args=()):
    """__init__ for GPS_RWTimer: initialise the read/write GPS driver, then
    attach the PPS pin and callback via GPS_Tbase.setup()."""
    as_rwGPS.GPS.__init__(self, sreader, swriter, local_offset, fix_cb, cb_mask, fix_cb_args,
                          msg_cb, msg_cb_args)
    self.setup(pps_pin, pps_cb, pps_cb_args)
class GPS_Tbase():
    """Mixin adding accurate timing to a GPS driver using the PPS signal.

    Provides μs-accurate time of day, sets the Pyboard RTC from GPS, and
    measures/applies an RTC calibration factor. Mixed into concrete classes
    by gps_ro_t_init / gps_rw_t_init, which call setup().
    """
    def setup(self, pps_pin, pps_cb, pps_cb_args):
        """Store PPS pin/callback state and schedule _start()."""
        self._pps_pin = pps_pin
        self._pps_cb = pps_cb
        self._pps_cb_args = pps_cb_args
        self.msecs = None # Integer time in ms since midnight at last PPS
        self.t_ms = 0 # ms since midnight
        self.acquired = None # Value of ticks_us at edge of PPS
        self._rtc_set = False # Set RTC flag
        self._rtcbuf = [0]*8 # Buffer for RTC setting
        self._time = [0]*4 # get_t_split() time buffer.
        loop = asyncio.get_event_loop()
        loop.create_task(self._start())
    async def _start(self):
        # Wait for a valid date message before enabling the PPS interrupt.
        await self.data_received(date=True)
        self._pps_pin.irq(self._isr, trigger = machine.Pin.IRQ_RISING)
    def close(self):
        """Disable the PPS pin interrupt."""
        self._pps_pin.irq(None)
    # If update rate > 1Hz, when PPS edge occurs the last RMC message will have
    # a nonzero ms value. Need to set RTC to 1 sec after the last 1 second boundary
    def _isr(self, _):
        """PPS rising-edge interrupt handler (runs in IRQ context)."""
        acquired = utime.ticks_us() # Save time of PPS
        # Time in last NMEA sentence was time of last PPS.
        # Reduce to integer secs since midnight local time.
        isecs = (self.epoch_time + int(3600*self.local_offset)) % 86400
        # ms since midnight (28 bits). Add in any ms in RMC data
        msecs = isecs * 1000 + self.msecs
        # This PPS is presumed to be one update later
        msecs += self._update_ms
        if msecs >= 86400000: # Next PPS will deal with rollover
            return
        if self.t_ms == msecs: # No RMC message has arrived: nothing to do
            return
        self.t_ms = msecs # Current time in ms past midnight
        self.acquired = acquired
        # Set RTC if required and if last RMC indicated a 1 second boundary
        if self._rtc_set:
            # Time as int(seconds) in last NMEA sentence. Earlier test ensures
            # no rollover when we add 1.
            self._rtcbuf[6] = (isecs + 1) % 60
            rtc.datetime(self._rtcbuf)
            self._rtc_set = False
        # Could be an outage here, so PPS arrives many secs after last sentence
        # Is this right? Does PPS continue during outage?
        self._pps_cb(self, *self._pps_cb_args)
    # Called when base class updates the epoch_time.
    # Need local time for setting Pyboard RTC in interrupt context
    def _dtset(self, wday):
        """Fill _rtcbuf with local date/time; secs is finalised by _isr."""
        t = self.epoch_time + int(3600 * self.local_offset)
        y, m, d, hrs, mins, secs, *_ = self._localtime(t)
        self._rtcbuf[0] = y
        self._rtcbuf[1] = m
        self._rtcbuf[2] = d
        self._rtcbuf[3] = wday
        self._rtcbuf[4] = hrs
        self._rtcbuf[5] = mins
        self._rtcbuf[6] = secs
    # Subsecs register is read-only. So need to set RTC on PPS leading edge.
    # Set flag and let ISR set the RTC. Pause until done.
    async def set_rtc(self):
        """Request an RTC set at the next PPS edge and wait until done."""
        if not on_pyboard:
            raise OSError('Only available on STM targets.')
        self._rtc_set = True
        while self._rtc_set:
            await asyncio.sleep_ms(250)
    # Value of RTC time at current instant. This is a notional arbitrary
    # precision integer in μs since Y2K. Notional because RTC is set to
    # local time.
    def _get_rtc_usecs(self):
        y, m, d, weekday, hrs, mins, secs, subsecs = rtc.datetime()
        # pyb.RTC weekday is 1..7; utime.mktime expects 0..6. Subseconds
        # counts down from 255 over one second; >> 8 divides by 256.
        tim = 1000000 * utime.mktime((y, m, d, hrs, mins, secs, weekday - 1, 0))
        return tim + ((1000000 * (255 - subsecs)) >> 8)
    # Return no. of μs RTC leads GPS. Done by comparing times at the instant of
    # PPS leading edge.
    async def delta(self):
        if not on_pyboard:
            raise OSError('Only available on STM targets.')
        rtc_time, gps_time = await self._await_pps() # μs since Y2K at time of latest PPS
        return rtc_time - gps_time
    # Pause until PPS interrupt occurs. Then wait for an RTC subsecond change.
    # Read the RTC time in μs since Y2K and adjust to give the time the RTC
    # (notionally) would have read at the PPS leading edge.
    async def _await_pps(self):
        t0 = self.acquired
        while self.acquired == t0: # Busy-wait on PPS interrupt: not time-critical
            await asyncio.sleep_ms(0) # because acquisition time stored in ISR.
        # Collect now so that GC cannot delay the timing loop below.
        gc.collect() # Time-critical code follows
        st = rtc.datetime()[7]
        while rtc.datetime()[7] == st: # Wait for RTC to change (4ms max)
            pass
        dt = utime.ticks_diff(utime.ticks_us(), self.acquired)
        trtc = self._get_rtc_usecs() - dt # Read RTC now and adjust for PPS edge
        # +1s: presumably the PPS edge marks the second following the last
        # reported time - TODO confirm against the driver's epoch_time update.
        tgps = 1000000 * (self.epoch_time + 3600*self.local_offset + 1)
        return trtc, tgps
    # Non-realtime calculation of calibration factor. times are in μs
    def _calculate(self, gps_start, gps_end, rtc_start, rtc_end):
        # Duration (μs) between PPS edges
        pps_delta = (gps_end - gps_start)
        # Duration (μs) between PPS edges as measured by RTC and corrected
        rtc_delta = (rtc_end - rtc_start)
        ppm = (1000000 * (rtc_delta - pps_delta)) / pps_delta # parts per million
        # One LSB of rtc.calibration() is ~0.954 ppm; negate to correct drift.
        return int(-ppm/0.954)
    # Measure difference between RTC and GPS rate and return calibration factor
    # If 3 successive identical results are within 1 digit the outcome is considered
    # valid and the coro quits.
    async def _getcal(self, minutes=5):
        if minutes < 1:
            raise ValueError('minutes must be >= 1')
        results = [0, 0, 0] # Last 3 cal results
        idx = 0 # Index into above circular buffer
        nresults = 0 # Count of results
        rtc.calibration(0) # Clear existing RTC calibration
        await self.set_rtc()
        # Wait for PPS, then RTC 1/256 second change. Return the time the RTC
        # would have measured at instant of PPS (notional μs since Y2K). Also
        # GPS time at the same instant.
        rtc_start, gps_start = await self._await_pps()
        for n in range(minutes):
            for _ in range(6): # Try every 10s
                await asyncio.sleep(10)
                # Get RTC time at instant of PPS
                rtc_end, gps_end = await self._await_pps()
                cal = self._calculate(gps_start, gps_end, rtc_start, rtc_end)
                print('Mins {:d} cal factor {:d}'.format(n + 1, cal))
                results[idx] = cal
                idx += 1
                idx %= len(results)
                nresults += 1
                # NOTE(review): >= 4 means the very first sample never takes
                # part in a successful convergence test - confirm intended.
                if nresults >= 4 and (abs(max(results) - min(results)) <= 1):
                    return round(sum(results)/len(results))
        return cal
    # Pause until time/date message received and 1st PPS interrupt has occurred.
    async def ready(self):
        while self.acquired is None:
            await asyncio.sleep(1)
    async def calibrate(self, minutes=5):
        """Acquire a calibration factor and apply it if in range -511..512."""
        if not on_pyboard:
            raise OSError('Only available on STM targets.')
        print('Waiting for GPS startup.')
        await self.ready()
        print('Waiting up to {} minutes to acquire calibration factor...'.format(minutes))
        cal = await self._getcal(minutes)
        if cal <= 512 and cal >= -511:
            rtc.calibration(cal)
            print('Pyboard RTC is calibrated. Factor is {:d}.'.format(cal))
        else:
            print('Calibration factor {:d} is out of range.'.format(cal))
    # User interface functions: accurate GPS time.
    # Return GPS time in ms since midnight (small int on 32 bit h/w).
    # No allocation.
    def get_ms(self):
        # IRQs disabled so the PPS ISR cannot update state mid-read.
        state = machine.disable_irq()
        t = self.t_ms
        acquired = self.acquired
        machine.enable_irq(state)
        return t + utime.ticks_diff(utime.ticks_us(), acquired) // 1000
    # Return accurate GPS time of day (hrs: int, mins: int, secs: int, μs: int)
    # The ISR can skip an update of .secs if a day rollover would occur. Next
    # RMC handles this, so if updates are at 1s intervals the subsequent ISR
    # will see hms = 0, 0, 1 and a value of .acquired > 1000000.
    # Even at the slowest update rate of 10s this can't overflow into minutes.
    def get_t_split(self):
        """Return [hrs, mins, secs, μs]; the list is a reused buffer."""
        state = machine.disable_irq()
        t = self.t_ms
        acquired = self.acquired
        machine.enable_irq(state)
        isecs, ims = divmod(t, 1000) # Get integer secs and ms
        x, secs = divmod(isecs, 60)
        hrs, mins = divmod(x, 60)
        dt = utime.ticks_diff(utime.ticks_us(), acquired) # μs to time now
        ds, us = divmod(dt, 1000000)
        # If dt > 1e6 can add to secs without risk of rollover: see above.
        self._time[0] = hrs
        self._time[1] = mins
        self._time[2] = secs + ds
        self._time[3] = us + ims*1000
        return self._time
# Concrete classes: combine the timing mixin with the read-only (as_GPS) and
# read/write (as_rwGPS) GPS drivers, using the module-level init functions.
GPS_Timer = type('GPS_Timer', (GPS_Tbase, as_GPS.AS_GPS), {'__init__': gps_ro_t_init})
GPS_RWTimer = type('GPS_RWTimer', (GPS_Tbase, as_rwGPS.GPS), {'__init__': gps_rw_t_init})
| {
"repo_name": "peterhinch/micropython-async",
"path": "v2/gps/as_tGPS.py",
"copies": "1",
"size": "10558",
"license": "mit",
"hash": 9020208297725953000,
"line_mean": 42.7676348548,
"line_max": 93,
"alpha_frac": 0.6065604854,
"autogenerated": false,
"ratio": 3.37104506232023,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9467474166979863,
"avg_score": 0.002026276148073293,
"num_lines": 241
} |
"""ASTNG hooks for the Python 2 standard library.
Currently help understanding of :
* hashlib.md5 and hashlib.sha1
"""
from logilab.astng import MANAGER
from logilab.astng.builder import ASTNGBuilder
MODULE_TRANSFORMS = {}
def hashlib_transform(module):
    """Patch the hashlib module node with stub md5/sha1 definitions so the
    inference engine understands their interface."""
    stub_source = '''
class md5(object):
  def __init__(self, value): pass
  def digest():
    return u''
  def update(self, value): pass
  def hexdigest(self):
    return u''
class sha1(object):
  def __init__(self, value): pass
  def digest():
    return u''
  def update(self, value): pass
  def hexdigest(self):
    return u''
'''
    fake = ASTNGBuilder(MANAGER).string_build(stub_source)
    for name in ('md5', 'sha1'):
        module.locals[name] = fake.locals[name]
def collections_transform(module):
    """Patch the collections module node with stub defaultdict/deque
    definitions."""
    stub_source = '''
class defaultdict(dict):
    default_factory = None
    def __missing__(self, key): pass
class deque(object):
    maxlen = 0
    def __init__(iterable=None, maxlen=None): pass
    def append(self, x): pass
    def appendleft(self, x): pass
    def clear(self): pass
    def count(self, x): return 0
    def extend(self, iterable): pass
    def extendleft(self, iterable): pass
    def pop(self): pass
    def popleft(self): pass
    def remove(self, value): pass
    def reverse(self): pass
    def rotate(self, n): pass
'''
    fake = ASTNGBuilder(MANAGER).string_build(stub_source)
    for name in ('defaultdict', 'deque'):
        module.locals[name] = fake.locals[name]
def pkg_resources_transform(module):
    """Patch the pkg_resources module node with stubs of its public
    resource-handling functions."""
    stub_source = '''
def resource_exists(package_or_requirement, resource_name):
    pass
def resource_isdir(package_or_requirement, resource_name):
    pass
def resource_filename(package_or_requirement, resource_name):
    pass
def resource_stream(package_or_requirement, resource_name):
    pass
def resource_string(package_or_requirement, resource_name):
    pass
def resource_listdir(package_or_requirement, resource_name):
    pass
def extraction_error():
    pass
def get_cache_path(archive_name, names=()):
    pass
def postprocess(tempname, filename):
    pass
def set_extraction_path(path):
    pass
def cleanup_resources(force=False):
    pass
'''
    fake = ASTNGBuilder(MANAGER).string_build(stub_source)
    # Copy every stub into the real module's local scope.
    for name in list(fake.locals):
        module.locals[name] = fake.locals[name]
def urlparse_transform(module):
    """Patch the urlparse module node with a stub urlparse()/ParseResult."""
    stub_source = '''
def urlparse(url, scheme='', allow_fragments=True):
    return ParseResult()
class ParseResult(object):
    def __init__(self):
        self.scheme = ''
        self.netloc = ''
        self.path = ''
        self.params = ''
        self.query = ''
        self.fragment = ''
        self.username = None
        self.password = None
        self.hostname = None
        self.port = None
    def geturl(self):
        return ''
'''
    fake = ASTNGBuilder(MANAGER).string_build(stub_source)
    # Copy every stub into the real module's local scope.
    for name in list(fake.locals):
        module.locals[name] = fake.locals[name]
def subprocess_transform(module):
    """Patch the subprocess module node with a stub Popen class."""
    stub_source = '''
class Popen(object):
    returncode = pid = 0
    stdin = stdout = stderr = file()
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0):
        pass
    def communicate(self, input=None):
        return ('string', 'string')
    def wait(self):
        return self.returncode
    def poll(self):
        return self.returncode
    def send_signal(self, signal):
        pass
    def terminate(self):
        pass
    def kill(self):
        pass
'''
    fake = ASTNGBuilder(MANAGER).string_build(stub_source)
    # Copy every stub into the real module's local scope.
    for name in list(fake.locals):
        module.locals[name] = fake.locals[name]
# Register the per-module patch functions keyed by module name; looked up
# by transform() below.
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['urlparse'] = urlparse_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform
def transform(module):
    """Apply the registered transform for this module, if one exists."""
    handler = MODULE_TRANSFORMS.get(module.name)
    if handler is not None:
        handler(module)
# NOTE(review): MANAGER is already imported at the top of this file; this
# re-import is redundant but harmless.
from logilab.astng import MANAGER
MANAGER.register_transformer(transform)
| {
"repo_name": "tlksio/tlksio",
"path": "env/lib/python3.4/site-packages/logilab/astng/brain/py2stdlib.py",
"copies": "1",
"size": "4246",
"license": "mit",
"hash": -6912772612641845000,
"line_mean": 22.3296703297,
"line_max": 63,
"alpha_frac": 0.6497880358,
"autogenerated": false,
"ratio": 3.744268077601411,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4894056113401411,
"avg_score": null,
"num_lines": null
} |
# AST nodes for micro-ML.
#
# Eli Bendersky [http://eli.thegreenplace.net]
# This code is in the public domain.
class ASTNode:
    """Base class for all micro-ML AST nodes."""
    # Filled in by the type inference algorithm.
    _type = None
    # Traversal passes use this; each concrete node class lists its
    # sub-nodes here.
    _children = []
    def visit_children(self, func):
        """Apply *func* to each direct child node."""
        for node in self._children:
            func(node)
class IntConstant(ASTNode):
    """Literal integer constant."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return '{}'.format(self.value)
class BoolConstant(ASTNode):
    """Literal boolean constant."""
    def __init__(self, value):
        self.value = value
    def __str__(self):
        return '{}'.format(self.value)
class Identifier(ASTNode):
    """Reference to a name."""
    def __init__(self, name):
        self.name = name
    def __str__(self):
        return str(self.name)
class OpExpr(ASTNode):
    """Binary operation between two expressions."""
    def __init__(self, op, left, right):
        self.op = op
        self.left = left
        self.right = right
        self._children = [left, right]
    def __str__(self):
        return '({0} {1} {2})'.format(self.left, self.op, self.right)
class AppExpr(ASTNode):
    """Application of a function to a sequence of arguments.
    func is a node, args is a sequence of nodes.
    """
    def __init__(self, func, args=()):
        self.func = func
        self.args = args
        self._children = [func] + list(args)
    def __str__(self):
        arglist = ', '.join(str(arg) for arg in self.args)
        return 'App({}, [{}])'.format(self.func, arglist)
class IfExpr(ASTNode):
    """Conditional expression: if ... then ... else ..."""
    def __init__(self, ifexpr, thenexpr, elseexpr):
        self.ifexpr = ifexpr
        self.thenexpr = thenexpr
        self.elseexpr = elseexpr
        self._children = [ifexpr, thenexpr, elseexpr]
    def __str__(self):
        return 'If({}, {}, {})'.format(self.ifexpr, self.thenexpr, self.elseexpr)
class LambdaExpr(ASTNode):
    """Anonymous function: lambda [args] -> expr."""
    # Filled by type inference: discovered types of the lambda's arguments.
    # Arguments are plain strings, not ASTNodes, so their _type cannot be
    # kept on a node.
    _arg_types = None
    def __init__(self, argnames, expr):
        self.argnames = argnames
        self.expr = expr
        self._children = [expr]
    def __str__(self):
        return 'Lambda([{}], {})'.format(', '.join(self.argnames), self.expr)
class Decl(ASTNode):
    """Declaration mapping name = expr.
    For functions expr is a Lambda node.
    """
    def __init__(self, name, expr):
        self.name = name
        self.expr = expr
        self._children = [expr]
    def __str__(self):
        return 'Decl({}, {})'.format(self.name, self.expr)
| {
"repo_name": "eliben/code-for-blog",
"path": "2018/type-inference/ast.py",
"copies": "1",
"size": "2937",
"license": "unlicense",
"hash": 4551662777410067500,
"line_mean": 25.2232142857,
"line_max": 78,
"alpha_frac": 0.5767790262,
"autogenerated": false,
"ratio": 3.7847938144329896,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9856531761701417,
"avg_score": 0.0010082157863145259,
"num_lines": 112
} |
"""AST nodes generated by the parser for the compiler. Also provides
some node tree helper functions used by the parser and compiler in order
to normalize nodes.
"""
import inspect
import operator
from collections import deque
from typing import Any
from typing import Tuple as TupleType
from markupsafe import Markup
# Operator spelling -> Python function tables, used for compile-time
# constant folding in the as_const() methods below.
_binop_to_func = {
    "*": operator.mul,
    "/": operator.truediv,
    "//": operator.floordiv,
    "**": operator.pow,
    "%": operator.mod,
    "+": operator.add,
    "-": operator.sub,
}
# Unary operators.
_uaop_to_func = {
    "not": operator.not_,
    "+": operator.pos,
    "-": operator.neg,
}
# Comparison operators, keyed by the token names the parser produces.
_cmpop_to_func = {
    "eq": operator.eq,
    "ne": operator.ne,
    "gt": operator.gt,
    "gteq": operator.ge,
    "lt": operator.lt,
    "lteq": operator.le,
    "in": lambda a, b: a in b,
    "notin": lambda a, b: a not in b,
}
class Impossible(Exception):
    """Raised if the node could not perform a requested action, e.g. a
    constant fold via ``as_const`` that cannot be done at compile time."""
class NodeType(type):
    """A metaclass for nodes that handles the field and attribute
    inheritance. fields and attributes from the parent class are
    automatically forwarded to the child."""
    def __new__(mcs, name, bases, d):
        for attr in "fields", "attributes":
            storage = []
            # Inherit the parent's tuple first, then append this class's own
            # entries; order therefore follows the inheritance chain.
            storage.extend(getattr(bases[0] if bases else object, attr, ()))
            storage.extend(d.get(attr, ()))
            assert len(bases) <= 1, "multiple inheritance not allowed"
            assert len(storage) == len(set(storage)), "layout conflict"
            d[attr] = tuple(storage)
        # Nodes are concrete unless they explicitly declare abstract = True.
        d.setdefault("abstract", False)
        return type.__new__(mcs, name, bases, d)
class EvalContext:
    """Holds evaluation time information. Custom attributes can be attached
    to it in extensions.
    """
    def __init__(self, environment, template_name=None):
        self.environment = environment
        autoescape = environment.autoescape
        # autoescape may be a per-template-name policy function.
        if callable(autoescape):
            autoescape = autoescape(template_name)
        self.autoescape = autoescape
        self.volatile = False
    def save(self):
        """Snapshot the current context state as a dict."""
        return dict(self.__dict__)
    def revert(self, old):
        """Restore a state previously returned by :meth:`save`."""
        self.__dict__.clear()
        self.__dict__.update(old)
def get_eval_context(node, ctx):
    """Return *ctx* if one was passed, otherwise build a default context
    from the node's attached environment (RuntimeError if it has none)."""
    if ctx is not None:
        return ctx
    if node.environment is None:
        raise RuntimeError(
            "if no eval context is passed, the node must have an"
            " attached environment."
        )
    return EvalContext(node.environment)
class Node(metaclass=NodeType):
    """Baseclass for all Jinja nodes. There are a number of nodes available
    of different types. There are four major types:
    - :class:`Stmt`: statements
    - :class:`Expr`: expressions
    - :class:`Helper`: helper nodes
    - :class:`Template`: the outermost wrapper node
    All nodes have fields and attributes. Fields may be other nodes, lists,
    or arbitrary values. Fields are passed to the constructor as regular
    positional arguments, attributes as keyword arguments. Each node has
    two attributes: `lineno` (the line number of the node) and `environment`.
    The `environment` attribute is set at the end of the parsing process for
    all nodes automatically.
    """
    # Per-class field names; NodeType merges these down the inheritance chain.
    fields: TupleType = ()
    attributes = ("lineno", "environment")
    abstract = True
    def __init__(self, *fields, **attributes):
        # Fields are positional, attributes keyword-only; both validated here.
        if self.abstract:
            raise TypeError("abstract nodes are not instantiable")
        if fields:
            if len(fields) != len(self.fields):
                if not self.fields:
                    raise TypeError(f"{self.__class__.__name__!r} takes 0 arguments")
                raise TypeError(
                    f"{self.__class__.__name__!r} takes 0 or {len(self.fields)}"
                    f" argument{'s' if len(self.fields) != 1 else ''}"
                )
            for name, arg in zip(self.fields, fields):
                setattr(self, name, arg)
        # Unsupplied attributes default to None.
        for attr in self.attributes:
            setattr(self, attr, attributes.pop(attr, None))
        if attributes:
            raise TypeError(f"unknown attribute {next(iter(attributes))!r}")
    def iter_fields(self, exclude=None, only=None):
        """This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples. Per default all fields are returned, but
        it's possible to limit that to some fields by providing the `only`
        parameter or to exclude some using the `exclude` parameter. Both
        should be sets or tuples of field names.
        """
        for name in self.fields:
            if (
                (exclude is only is None)
                or (exclude is not None and name not in exclude)
                or (only is not None and name in only)
            ):
                try:
                    yield name, getattr(self, name)
                except AttributeError:
                    # Fields may be unset when the node was built without
                    # positional arguments.
                    pass
    def iter_child_nodes(self, exclude=None, only=None):
        """Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values of they are nodes. If the value
        of a field is a list all the nodes in that list are returned.
        """
        for _, item in self.iter_fields(exclude, only):
            if isinstance(item, list):
                for n in item:
                    if isinstance(n, Node):
                        yield n
            elif isinstance(item, Node):
                yield item
    def find(self, node_type):
        """Find the first node of a given type. If no such node exists the
        return value is `None`.
        """
        for result in self.find_all(node_type):
            return result
    def find_all(self, node_type):
        """Find all the nodes of a given type. If the type is a tuple,
        the check is performed for any of the tuple items.
        """
        for child in self.iter_child_nodes():
            if isinstance(child, node_type):
                yield child
            yield from child.find_all(node_type)
    def set_ctx(self, ctx):
        """Reset the context of a node and all child nodes. Per default the
        parser will all generate nodes that have a 'load' context as it's the
        most common one. This method is used in the parser to set assignment
        targets and other nodes to a store context.
        """
        # Iterative breadth-first traversal avoids recursion depth limits.
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if "ctx" in node.fields:
                node.ctx = ctx
            todo.extend(node.iter_child_nodes())
        return self
    def set_lineno(self, lineno, override=False):
        """Set the line numbers of the node and children."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            if "lineno" in node.attributes:
                # Only fill in missing linenos unless override is requested.
                if node.lineno is None or override:
                    node.lineno = lineno
            todo.extend(node.iter_child_nodes())
        return self
    def set_environment(self, environment):
        """Set the environment for all nodes."""
        todo = deque([self])
        while todo:
            node = todo.popleft()
            node.environment = environment
            todo.extend(node.iter_child_nodes())
        return self
    def __eq__(self, other):
        # Nodes compare equal when they are the same type with equal fields.
        if type(self) is not type(other):
            return NotImplemented
        return tuple(self.iter_fields()) == tuple(other.iter_fields())
    def __hash__(self):
        return hash(tuple(self.iter_fields()))
    def __repr__(self):
        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
        return f"{self.__class__.__name__}({args_str})"
    def dump(self):
        """Return a string of Python source that would rebuild this tree."""
        def _dump(node):
            if not isinstance(node, Node):
                buf.append(repr(node))
                return
            buf.append(f"nodes.{node.__class__.__name__}(")
            if not node.fields:
                buf.append(")")
                return
            for idx, field in enumerate(node.fields):
                if idx:
                    buf.append(", ")
                value = getattr(node, field)
                if isinstance(value, list):
                    buf.append("[")
                    for idx, item in enumerate(value):
                        if idx:
                            buf.append(", ")
                        _dump(item)
                    buf.append("]")
                else:
                    _dump(value)
            buf.append(")")
        buf = []
        _dump(self)
        return "".join(buf)
class Stmt(Node):
    """Base node for all statements (abstract)."""
    abstract = True
class Helper(Node):
    """Nodes that exist in a specific context only (abstract)."""
    abstract = True
# --- Concrete statement nodes: each only declares its `fields` layout. ---
class Template(Node):
    """Node that represents a template. This must be the outermost node that
    is passed to the compiler.
    """
    fields = ("body",)
class Output(Stmt):
    """A node that holds multiple expressions which are then printed out.
    This is used both for the `print` statement and the regular template data.
    """
    fields = ("nodes",)
class Extends(Stmt):
    """Represents an extends statement."""
    fields = ("template",)
class For(Stmt):
    """The for loop. `target` is the target for the iteration (usually a
    :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
    of nodes that are used as loop-body, and `else_` a list of nodes for the
    `else` block. If no else node exists it has to be an empty list.
    For filtered nodes an expression can be stored as `test`, otherwise `None`.
    """
    fields = ("target", "iter", "body", "else_", "test", "recursive")
class If(Stmt):
    """If `test` is true, `body` is rendered, else `else_`."""
    fields = ("test", "body", "elif_", "else_")
class Macro(Stmt):
    """A macro definition. `name` is the name of the macro, `args` a list of
    arguments and `defaults` a list of defaults if there are any. `body` is
    a list of nodes for the macro body.
    """
    fields = ("name", "args", "defaults", "body")
class CallBlock(Stmt):
    """Like a macro without a name but a call instead. `call` is called with
    the unnamed macro as `caller` argument this node holds.
    """
    fields = ("call", "args", "defaults", "body")
class FilterBlock(Stmt):
    """Node for filter sections."""
    fields = ("body", "filter")
class With(Stmt):
    """Specific node for with statements. In older versions of Jinja the
    with statement was implemented on the base of the `Scope` node instead.
    .. versionadded:: 2.9.3
    """
    fields = ("targets", "values", "body")
class Block(Stmt):
    """A node that represents a block.
    .. versionchanged:: 3.0.0
        the `required` field was added.
    """
    fields = ("name", "body", "scoped", "required")
class Include(Stmt):
    """A node that represents the include tag."""
    fields = ("template", "with_context", "ignore_missing")
class Import(Stmt):
    """A node that represents the import tag."""
    fields = ("template", "target", "with_context")
class FromImport(Stmt):
    """A node that represents the from import tag. It's important to not
    pass unsafe names to the name attribute. The compiler translates the
    attribute lookups directly into getattr calls and does *not* use the
    subscript callback of the interface. As exported variables may not
    start with double underscores (which the parser asserts) this is not a
    problem for regular Jinja code, but if this node is used in an extension
    extra care must be taken.
    The list of names may contain tuples if aliases are wanted.
    """
    fields = ("template", "names", "with_context")
class ExprStmt(Stmt):
    """A statement that evaluates an expression and discards the result."""
    fields = ("node",)
class Assign(Stmt):
    """Assigns an expression to a target."""
    fields = ("target", "node")
class AssignBlock(Stmt):
    """Assigns a block to a target."""
    fields = ("target", "filter", "body")
class Expr(Node):
    """Baseclass for all expressions."""
    abstract = True
    def as_const(self, eval_ctx=None):
        """Return the value of the expression as constant or raise
        :exc:`Impossible` if this was not possible.
        An :class:`EvalContext` can be provided, if none is given
        a default context is created which requires the nodes to have
        an attached environment.
        .. versionchanged:: 2.4
           the `eval_ctx` parameter was added.
        """
        # Subclasses that can be folded override this; the default cannot.
        raise Impossible()
    def can_assign(self):
        """Check if it's possible to assign something to this node."""
        # Only specific node types (e.g. Name, Tuple) are valid targets.
        return False
class BinExpr(Expr):
    """Baseclass for all binary expressions."""
    fields = ("left", "right")
    operator: Any = None
    abstract = True
    def as_const(self, eval_ctx=None):
        """Constant-fold by applying the operator to the folded operands.

        Raises Impossible when the operator is intercepted by a sandboxed
        environment or when either operand cannot be folded.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        # intercepted operators cannot be folded at compile time
        if (
            self.environment.sandboxed
            and self.operator in self.environment.intercepted_binops
        ):
            raise Impossible()
        f = _binop_to_func[self.operator]
        try:
            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
        except Exception as e:
            # Chain the original failure so it remains visible when
            # debugging instead of being silently discarded.
            raise Impossible() from e
class UnaryExpr(Expr):
    """Baseclass for all unary expressions."""
    fields = ("node",)
    operator: Any = None
    abstract = True
    def as_const(self, eval_ctx=None):
        """Constant-fold by applying the operator to the folded operand.

        Raises Impossible when the operator is intercepted by a sandboxed
        environment or when the operand cannot be folded.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        # intercepted operators cannot be folded at compile time
        if (
            self.environment.sandboxed
            and self.operator in self.environment.intercepted_unops
        ):
            raise Impossible()
        f = _uaop_to_func[self.operator]
        try:
            return f(self.node.as_const(eval_ctx))
        except Exception as e:
            # Chain the original failure so it remains visible when
            # debugging instead of being silently discarded.
            raise Impossible() from e
class Name(Expr):
    """Looks up a name or stores a value in a name.
    The `ctx` of the node can be one of the following values:
    - `store`: store a value in the name
    - `load`: load that name
    - `param`: like `store` but if the name was defined as function parameter.
    """
    fields = ("name", "ctx")
    def can_assign(self):
        # The reserved constant names can never be assignment targets.
        reserved = {"true", "false", "none", "True", "False", "None"}
        return self.name not in reserved
class NSRef(Expr):
    """Reference to a namespace value assignment (`name`.`attr`)."""
    fields = ("name", "attr")
    def can_assign(self):
        # We don't need any special checks here; NSRef assignments have a
        # runtime check to ensure the target is a namespace object which will
        # have been checked already as it is created using a normal assignment
        # which goes through a `Name` node.
        return True
class Literal(Expr):
    """Baseclass for literal (constant-valued) expression nodes."""
    abstract = True
class Const(Literal):
    """All constant values. The parser will return this node for simple
    constants such as ``42`` or ``"foo"`` but it can be used to store more
    complex values such as lists too. Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true).
    """

    fields = ("value",)

    def as_const(self, eval_ctx=None):
        # A Const is already folded; just hand back the stored value.
        return self.value

    @classmethod
    def from_untrusted(cls, value, lineno=None, environment=None):
        """Return a const object if the value is representable as
        constant value in the generated code, otherwise it will raise
        an `Impossible` exception.
        """
        from .compiler import has_safe_repr

        if not has_safe_repr(value):
            raise Impossible()

        return cls(value, lineno=lineno, environment=environment)


class TemplateData(Literal):
    """A constant template string."""

    fields = ("data",)

    def as_const(self, eval_ctx=None):
        """Return the raw data, wrapped in ``Markup`` when autoescaping.

        Raises :exc:`Impossible` when the eval context is volatile, since
        the autoescape setting is then unknown at compile time.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if eval_ctx.volatile:
            raise Impossible()

        if eval_ctx.autoescape:
            return Markup(self.data)

        return self.data
class Tuple(Literal):
    """For loop unpacking and some other things like multiple arguments
    for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
    is used for loading the names or storing.
    """

    fields = ("items", "ctx")

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return tuple(x.as_const(eval_ctx) for x in self.items)

    def can_assign(self):
        # Assignable only if every element is itself assignable.
        for item in self.items:
            if not item.can_assign():
                return False

        return True


class List(Literal):
    """Any list literal such as ``[1, 2, 3]``"""

    fields = ("items",)

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return [x.as_const(eval_ctx) for x in self.items]


class Dict(Literal):
    """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
    :class:`Pair` nodes.
    """

    fields = ("items",)

    def as_const(self, eval_ctx=None):
        # Each Pair folds to a (key, value) tuple, which dict() consumes.
        eval_ctx = get_eval_context(self, eval_ctx)
        return dict(x.as_const(eval_ctx) for x in self.items)


class Pair(Helper):
    """A key, value pair for dicts."""

    fields = ("key", "value")

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)


class Keyword(Helper):
    """A key, value pair for keyword arguments where key is a string."""

    fields = ("key", "value")

    def as_const(self, eval_ctx=None):
        # The key is a plain string, so only the value needs folding.
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
    """A conditional expression (inline if expression). (``{{
    foo if bar else baz }}``)
    """

    fields = ("test", "expr1", "expr2")

    def as_const(self, eval_ctx=None):
        """Fold the conditional; short-circuits like Python's ternary.

        Raises :exc:`Impossible` when the ``else`` branch is missing but
        would be taken, since that yields an undefined object at runtime.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if self.test.as_const(eval_ctx):
            return self.expr1.as_const(eval_ctx)

        # if we evaluate to an undefined object, we better do that at runtime
        if self.expr2 is None:
            raise Impossible()

        return self.expr2.as_const(eval_ctx)
def args_as_const(node, eval_ctx):
    """Constant-fold the argument lists of a call-like node.

    Returns ``(args, kwargs)`` with every argument node reduced via
    ``as_const``.  ``dyn_args``/``dyn_kwargs`` (i.e. ``*args`` /
    ``**kwargs``) are folded and merged in as well; :exc:`Impossible`
    is raised when a dynamic argument cannot be folded or does not
    behave like an iterable/mapping.
    """
    positional = []
    for arg_node in node.args:
        positional.append(arg_node.as_const(eval_ctx))

    keyword = {}
    for kw_node in node.kwargs:
        key, value = kw_node.as_const(eval_ctx)
        keyword[key] = value

    if node.dyn_args is not None:
        try:
            positional.extend(node.dyn_args.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    if node.dyn_kwargs is not None:
        try:
            keyword.update(node.dyn_kwargs.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    return positional, keyword
class Filter(Expr):
    """This node applies a filter on an expression. `name` is the name of
    the filter, the rest of the fields are the same as for :class:`Call`.

    If the `node` of a filter is `None` the contents of the last buffer are
    filtered. Buffers are created by macros and filter blocks.
    """

    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")

    def as_const(self, eval_ctx=None):
        """Apply the filter at compile time if possible.

        Raises :exc:`Impossible` for volatile contexts, filter blocks
        (``node is None``), unknown or context-dependent filters, async
        filters, and any error while calling the filter.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if eval_ctx.volatile or self.node is None:
            raise Impossible()

        # Context filters need the runtime template context and therefore
        # can never be folded.
        filter_ = self.environment.filters.get(self.name)

        if filter_ is None or getattr(filter_, "contextfilter", False) is True:
            raise Impossible()

        # We cannot constant handle async filters, so we need to make
        # sure to not go down this path. Account for both sync/async and
        # pure-async filters.
        if eval_ctx.environment.is_async and (
            getattr(filter_, "asyncfiltervariant", False)
            or inspect.iscoroutinefunction(filter_)
        ):
            raise Impossible()

        args, kwargs = args_as_const(self, eval_ctx)
        # The filtered value is always the first positional argument.
        args.insert(0, self.node.as_const(eval_ctx))

        if getattr(filter_, "evalcontextfilter", False) is True:
            args.insert(0, eval_ctx)
        elif getattr(filter_, "environmentfilter", False) is True:
            args.insert(0, self.environment)

        try:
            return filter_(*args, **kwargs)
        except Exception:
            raise Impossible()


class Test(Expr):
    """Applies a test on an expression. `name` is the name of the test, the
    rest of the fields are the same as for :class:`Call`.
    """

    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")

    def as_const(self, eval_ctx=None):
        # NOTE(review): unlike Filter.as_const above, this does not check
        # eval_ctx.volatile, contextfilter-style decorators, or async
        # variants before calling -- presumably tests here are assumed to
        # be plain synchronous callables; confirm against the test API.
        test = self.environment.tests.get(self.name)

        if test is None:
            raise Impossible()

        eval_ctx = get_eval_context(self, eval_ctx)
        args, kwargs = args_as_const(self, eval_ctx)
        args.insert(0, self.node.as_const(eval_ctx))

        try:
            return test(*args, **kwargs)
        except Exception:
            raise Impossible()
class Call(Expr):
    """Calls an expression. `args` is a list of arguments, `kwargs` a list
    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
    and `dyn_kwargs` has to be either `None` or a node that is used as
    node for dynamic positional (``*args``) or keyword (``**kwargs``)
    arguments.
    """

    fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")


class Getitem(Expr):
    """Get an attribute or item from an expression and prefer the item."""

    fields = ("node", "arg", "ctx")

    def as_const(self, eval_ctx=None):
        """Fold ``node[arg]`` via the environment's ``getitem`` hook.

        Only ``load`` contexts can be folded; stores must happen at
        runtime.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if self.ctx != "load":
            raise Impossible()

        try:
            return self.environment.getitem(
                self.node.as_const(eval_ctx), self.arg.as_const(eval_ctx)
            )
        except Exception:
            raise Impossible()

    def can_assign(self):
        # Subscript targets are resolved at runtime, never assigned to
        # directly by the compiler.
        return False


class Getattr(Expr):
    """Get an attribute or item from an expression that is a ascii-only
    bytestring and prefer the attribute.
    """

    fields = ("node", "attr", "ctx")

    def as_const(self, eval_ctx=None):
        """Fold ``node.attr`` via the environment's ``getattr`` hook."""
        if self.ctx != "load":
            raise Impossible()

        try:
            eval_ctx = get_eval_context(self, eval_ctx)
            return self.environment.getattr(self.node.as_const(eval_ctx), self.attr)
        except Exception:
            raise Impossible()

    def can_assign(self):
        # Attribute targets are resolved at runtime, never assigned to
        # directly by the compiler.
        return False
class Slice(Expr):
    """Represents a slice object. This must only be used as argument for
    :class:`Subscript`.
    """

    fields = ("start", "stop", "step")

    def as_const(self, eval_ctx=None):
        """Fold into a real :class:`slice` object."""
        eval_ctx = get_eval_context(self, eval_ctx)

        def const(obj):
            # None stays None so open-ended slices (e.g. ``[:2]``) work.
            if obj is None:
                return None

            return obj.as_const(eval_ctx)

        return slice(const(self.start), const(self.stop), const(self.step))


class Concat(Expr):
    """Concatenates the list of expressions provided after converting
    them to strings.
    """

    fields = ("nodes",)

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return "".join(str(x.as_const(eval_ctx)) for x in self.nodes)


class Compare(Expr):
    """Compares an expression with some other expressions. `ops` must be a
    list of :class:`Operand`\\s.
    """

    fields = ("expr", "ops")

    def as_const(self, eval_ctx=None):
        """Fold a (possibly chained) comparison like ``a < b <= c``.

        Each operand is compared against the previous value, matching
        Python's chained-comparison short-circuiting.
        """
        eval_ctx = get_eval_context(self, eval_ctx)
        result = value = self.expr.as_const(eval_ctx)

        try:
            for op in self.ops:
                new_value = op.expr.as_const(eval_ctx)
                result = _cmpop_to_func[op.op](value, new_value)

                if not result:
                    return False

                # Chain onward from the right-hand operand.
                value = new_value
        except Exception:
            raise Impossible()

        return result
class Operand(Helper):
    """Holds an operator and an expression."""

    fields = ("op", "expr")


class Mul(BinExpr):
    """Multiplies the left with the right node."""

    operator = "*"


class Div(BinExpr):
    """Divides the left by the right node."""

    operator = "/"


class FloorDiv(BinExpr):
    """Divides the left by the right node and truncates the result to
    an integer (floor division, ``//``).
    """

    operator = "//"


class Add(BinExpr):
    """Add the left to the right node."""

    operator = "+"


class Sub(BinExpr):
    """Subtract the right from the left node."""

    operator = "-"


class Mod(BinExpr):
    """Left modulo right."""

    operator = "%"


class Pow(BinExpr):
    """Left to the power of right."""

    operator = "**"


class And(BinExpr):
    """Short circuited AND."""

    operator = "and"

    def as_const(self, eval_ctx=None):
        # Overridden: "and" is not in _binop_to_func and must
        # short-circuit like Python's operator.
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.left.as_const(eval_ctx) and self.right.as_const(eval_ctx)


class Or(BinExpr):
    """Short circuited OR."""

    operator = "or"

    def as_const(self, eval_ctx=None):
        # Overridden: "or" is not in _binop_to_func and must
        # short-circuit like Python's operator.
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.left.as_const(eval_ctx) or self.right.as_const(eval_ctx)


class Not(UnaryExpr):
    """Negate the expression."""

    operator = "not"


class Neg(UnaryExpr):
    """Make the expression negative."""

    operator = "-"


class Pos(UnaryExpr):
    """Make the expression positive (noop for most expressions)"""

    operator = "+"
# Helpers for extensions


class EnvironmentAttribute(Expr):
    """Loads an attribute from the environment object. This is useful for
    extensions that want to call a callback stored on the environment.
    """

    fields = ("name",)


class ExtensionAttribute(Expr):
    """Returns the attribute of an extension bound to the environment.
    The identifier is the identifier of the :class:`Extension`.

    This node is usually constructed by calling the
    :meth:`~jinja2.ext.Extension.attr` method on an extension.
    """

    fields = ("identifier", "name")


class ImportedName(Expr):
    """If created with an import name the import name is returned on node
    access. For example ``ImportedName('cgi.escape')`` returns the `escape`
    function from the cgi module on evaluation. Imports are optimized by the
    compiler so there is no need to assign them to local variables.
    """

    fields = ("importname",)


class InternalName(Expr):
    """An internal name in the compiler. You cannot create these nodes
    yourself but the parser provides a
    :meth:`~jinja2.parser.Parser.free_identifier` method that creates
    a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
    """

    fields = ("name",)

    def __init__(self):
        # Deliberately unconstructible: only the parser may mint these.
        raise TypeError(
            "Can't create internal names. Use the "
            "`free_identifier` method on a parser."
        )


class MarkSafe(Expr):
    """Mark the wrapped expression as safe (wrap it as `Markup`)."""

    fields = ("expr",)

    def as_const(self, eval_ctx=None):
        eval_ctx = get_eval_context(self, eval_ctx)
        return Markup(self.expr.as_const(eval_ctx))


class MarkSafeIfAutoescape(Expr):
    """Mark the wrapped expression as safe (wrap it as `Markup`) but
    only if autoescaping is active.

    .. versionadded:: 2.5
    """

    fields = ("expr",)

    def as_const(self, eval_ctx=None):
        # A volatile context means the autoescape flag is unknown here.
        eval_ctx = get_eval_context(self, eval_ctx)

        if eval_ctx.volatile:
            raise Impossible()

        expr = self.expr.as_const(eval_ctx)

        if eval_ctx.autoescape:
            return Markup(expr)

        return expr
class ContextReference(Expr):
    """Returns the current template context. It can be used like a
    :class:`Name` node, with a ``'load'`` ctx and will return the
    current :class:`~jinja2.runtime.Context` object.

    Here an example that assigns the current template name to a
    variable named `foo`::

        Assign(Name('foo', ctx='store'),
               Getattr(ContextReference(), 'name'))

    This is basically equivalent to using the
    :func:`~jinja2.contextfunction` decorator when using the
    high-level API, which causes a reference to the context to be passed
    as the first argument to a function.
    """


class DerivedContextReference(Expr):
    """Return the current template context including locals. Behaves
    exactly like :class:`ContextReference`, but includes local
    variables, such as from a ``for`` loop.

    .. versionadded:: 2.11
    """


class Continue(Stmt):
    """Continue a loop."""


class Break(Stmt):
    """Break a loop."""


class Scope(Stmt):
    """An artificial scope."""

    fields = ("body",)


class OverlayScope(Stmt):
    """An overlay scope for extensions. This is a largely unoptimized scope
    that however can be used to introduce completely arbitrary variables into
    a sub scope from a dictionary or dictionary like object. The `context`
    field has to evaluate to a dictionary object.

    Example usage::

        OverlayScope(context=self.call_method('get_context'),
                     body=[...])

    .. versionadded:: 2.10
    """

    fields = ("context", "body")


class EvalContextModifier(Stmt):
    """Modifies the eval context. For each option that should be modified,
    a :class:`Keyword` has to be added to the :attr:`options` list.

    Example to change the `autoescape` setting::

        EvalContextModifier(options=[Keyword('autoescape', Const(True))])
    """

    fields = ("options",)


class ScopedEvalContextModifier(EvalContextModifier):
    """Modifies the eval context and reverts it later. Works exactly like
    :class:`EvalContextModifier` but will only modify the
    :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
    """

    fields = ("body",)


# make sure nobody creates custom nodes
def _failing_new(*args, **kwargs):
    """Replacement for NodeType.__new__ that forbids new node classes."""
    raise TypeError("can't create custom node types")


# Seal the node hierarchy: after this module is defined, any attempt to
# subclass Node (which would go through the NodeType metaclass) fails.
NodeType.__new__ = staticmethod(_failing_new)  # type: ignore
del _failing_new
| {
"repo_name": "mitsuhiko/jinja2",
"path": "src/jinja2/nodes.py",
"copies": "2",
"size": "30186",
"license": "bsd-3-clause",
"hash": -634074959863519200,
"line_mean": 27.4505183789,
"line_max": 85,
"alpha_frac": 0.6035248128,
"autogenerated": false,
"ratio": 4.035022055874883,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5638546868674883,
"avg_score": null,
"num_lines": null
} |
"""AST nodes generated by the parser for the compiler. Also provides
some node tree helper functions used by the parser and compiler in order
to normalize nodes.
"""
import inspect
import operator
import typing as t
from collections import deque
from markupsafe import Markup
from .utils import _PassArg
if t.TYPE_CHECKING:
import typing_extensions as te
from .environment import Environment
# Type variable bound to Node; lets Node.find/find_all return the
# searched-for node type instead of plain Node.
_NodeBound = t.TypeVar("_NodeBound", bound="Node")

# Maps template binary-operator symbols to the Python callables used for
# compile-time constant folding (see BinExpr.as_const).
_binop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
    "*": operator.mul,
    "/": operator.truediv,
    "//": operator.floordiv,
    "**": operator.pow,
    "%": operator.mod,
    "+": operator.add,
    "-": operator.sub,
}

# Unary-operator equivalents (see UnaryExpr.as_const).
_uaop_to_func: t.Dict[str, t.Callable[[t.Any], t.Any]] = {
    "not": operator.not_,
    "+": operator.pos,
    "-": operator.neg,
}

# Comparison operators by parser token name (see Compare.as_const).
_cmpop_to_func: t.Dict[str, t.Callable[[t.Any, t.Any], t.Any]] = {
    "eq": operator.eq,
    "ne": operator.ne,
    "gt": operator.gt,
    "gteq": operator.ge,
    "lt": operator.lt,
    "lteq": operator.le,
    "in": lambda a, b: a in b,
    "notin": lambda a, b: a not in b,
}


class Impossible(Exception):
    """Raised if the node could not perform a requested action."""
class NodeType(type):
    """Metaclass for nodes that handles ``fields`` and ``attributes``
    inheritance: the declarations of the (single) base class are merged
    with the child's own, so children automatically extend the parent
    layout.  Duplicate names across parent and child are rejected.
    """

    def __new__(mcs, name, bases, d):  # type: ignore
        assert len(bases) <= 1, "multiple inheritance not allowed"
        parent = bases[0] if bases else object

        for attr in ("fields", "attributes"):
            inherited = tuple(getattr(parent, attr, ()))
            own = tuple(d.get(attr, ()))
            combined = inherited + own
            # A child re-declaring a parent's field would silently shadow
            # its constructor position -- treat that as a layout conflict.
            assert len(combined) == len(set(combined)), "layout conflict"
            d[attr] = combined

        # Node classes are concrete unless they opt out explicitly.
        d.setdefault("abstract", False)
        return type.__new__(mcs, name, bases, d)
class EvalContext:
    """Holds evaluation time information. Custom attributes can be attached
    to it in extensions.
    """

    def __init__(
        self, environment: "Environment", template_name: t.Optional[str] = None
    ) -> None:
        self.environment = environment
        autoescape = environment.autoescape
        # ``autoescape`` may be a per-template predicate instead of a flag.
        self.autoescape = (
            autoescape(template_name) if callable(autoescape) else autoescape
        )
        self.volatile = False

    def save(self) -> t.Mapping[str, t.Any]:
        """Snapshot the current state for a later :meth:`revert`."""
        return dict(self.__dict__)

    def revert(self, old: t.Mapping[str, t.Any]) -> None:
        """Restore a snapshot produced by :meth:`save`."""
        self.__dict__.clear()
        self.__dict__.update(old)
def get_eval_context(node: "Node", ctx: t.Optional[EvalContext]) -> EvalContext:
    """Return ``ctx`` unchanged, or build a default :class:`EvalContext`
    from the node's attached environment when ``ctx`` is ``None``.

    Raises :exc:`RuntimeError` if neither a context nor an environment
    is available.
    """
    if ctx is None:
        if node.environment is None:
            raise RuntimeError(
                "if no eval context is passed, the node must have an"
                " attached environment."
            )

        return EvalContext(node.environment)

    return ctx
class Node(metaclass=NodeType):
    """Baseclass for all Jinja nodes. There are a number of nodes available
    of different types. There are four major types:

    -   :class:`Stmt`: statements
    -   :class:`Expr`: expressions
    -   :class:`Helper`: helper nodes
    -   :class:`Template`: the outermost wrapper node

    All nodes have fields and attributes. Fields may be other nodes, lists,
    or arbitrary values. Fields are passed to the constructor as regular
    positional arguments, attributes as keyword arguments. Each node has
    two attributes: `lineno` (the line number of the node) and `environment`.
    The `environment` attribute is set at the end of the parsing process for
    all nodes automatically.
    """

    fields: t.Tuple[str, ...] = ()
    attributes: t.Tuple[str, ...] = ("lineno", "environment")
    abstract = True

    lineno: int
    environment: t.Optional["Environment"]

    def __init__(self, *fields: t.Any, **attributes: t.Any) -> None:
        if self.abstract:
            raise TypeError("abstract nodes are not instantiable")

        if fields:
            # Positional arguments must cover either none or all fields.
            if len(fields) != len(self.fields):
                if not self.fields:
                    raise TypeError(f"{type(self).__name__!r} takes 0 arguments")

                raise TypeError(
                    f"{type(self).__name__!r} takes 0 or {len(self.fields)}"
                    f" argument{'s' if len(self.fields) != 1 else ''}"
                )

            for name, arg in zip(self.fields, fields):
                setattr(self, name, arg)

        # Declared attributes default to None; anything left over is an
        # error.
        for attr in self.attributes:
            setattr(self, attr, attributes.pop(attr, None))

        if attributes:
            raise TypeError(f"unknown attribute {next(iter(attributes))!r}")

    def iter_fields(
        self,
        exclude: t.Optional[t.Container[str]] = None,
        only: t.Optional[t.Container[str]] = None,
    ) -> t.Iterator[t.Tuple[str, t.Any]]:
        """This method iterates over all fields that are defined and yields
        ``(key, value)`` tuples. Per default all fields are returned, but
        it's possible to limit that to some fields by providing the `only`
        parameter or to exclude some using the `exclude` parameter. Both
        should be sets or tuples of field names.
        """
        for name in self.fields:
            if (
                (exclude is None and only is None)
                or (exclude is not None and name not in exclude)
                or (only is not None and name in only)
            ):
                # Fields that were never assigned are silently skipped.
                try:
                    yield name, getattr(self, name)
                except AttributeError:
                    pass

    def iter_child_nodes(
        self,
        exclude: t.Optional[t.Container[str]] = None,
        only: t.Optional[t.Container[str]] = None,
    ) -> t.Iterator["Node"]:
        """Iterates over all direct child nodes of the node. This iterates
        over all fields and yields the values if they are nodes. If the
        value of a field is a list all the nodes in that list are returned.
        """
        for _, item in self.iter_fields(exclude, only):
            if isinstance(item, list):
                for n in item:
                    if isinstance(n, Node):
                        yield n
            elif isinstance(item, Node):
                yield item

    def find(self, node_type: t.Type[_NodeBound]) -> t.Optional[_NodeBound]:
        """Find the first node of a given type. If no such node exists the
        return value is `None`.
        """
        for result in self.find_all(node_type):
            return result

        return None

    def find_all(
        self, node_type: t.Union[t.Type[_NodeBound], t.Tuple[t.Type[_NodeBound], ...]]
    ) -> t.Iterator[_NodeBound]:
        """Find all the nodes of a given type. If the type is a tuple,
        the check is performed for any of the tuple items.
        """
        # Depth-first traversal; a matching node's children are searched too.
        for child in self.iter_child_nodes():
            if isinstance(child, node_type):
                yield child  # type: ignore

            yield from child.find_all(node_type)

    def set_ctx(self, ctx: str) -> "Node":
        """Reset the context of a node and all child nodes. Per default the
        parser will all generate nodes that have a 'load' context as it's the
        most common one. This method is used in the parser to set assignment
        targets and other nodes to a store context.
        """
        # Iterative breadth-first walk; avoids recursion on deep trees.
        todo = deque([self])

        while todo:
            node = todo.popleft()

            if "ctx" in node.fields:
                node.ctx = ctx  # type: ignore

            todo.extend(node.iter_child_nodes())

        return self

    def set_lineno(self, lineno: int, override: bool = False) -> "Node":
        """Set the line numbers of the node and children."""
        todo = deque([self])

        while todo:
            node = todo.popleft()

            if "lineno" in node.attributes:
                # Only fill in missing line numbers unless forced.
                if node.lineno is None or override:
                    node.lineno = lineno

            todo.extend(node.iter_child_nodes())

        return self

    def set_environment(self, environment: "Environment") -> "Node":
        """Set the environment for all nodes."""
        todo = deque([self])

        while todo:
            node = todo.popleft()
            node.environment = environment
            todo.extend(node.iter_child_nodes())

        return self

    def __eq__(self, other: t.Any) -> bool:
        # Nodes are equal when they are the same class with equal fields.
        if type(self) is not type(other):
            return NotImplemented

        return tuple(self.iter_fields()) == tuple(other.iter_fields())

    def __hash__(self) -> int:
        return hash(tuple(self.iter_fields()))

    def __repr__(self) -> str:
        args_str = ", ".join(f"{a}={getattr(self, a, None)!r}" for a in self.fields)
        return f"{type(self).__name__}({args_str})"

    def dump(self) -> str:
        """Return a parseable string representation of the node tree."""

        def _dump(node: t.Union[Node, t.Any]) -> None:
            if not isinstance(node, Node):
                buf.append(repr(node))
                return

            buf.append(f"nodes.{type(node).__name__}(")

            if not node.fields:
                buf.append(")")
                return

            for idx, field in enumerate(node.fields):
                if idx:
                    buf.append(", ")

                value = getattr(node, field)

                if isinstance(value, list):
                    buf.append("[")

                    for idx, item in enumerate(value):
                        if idx:
                            buf.append(", ")

                        _dump(item)

                    buf.append("]")
                else:
                    _dump(value)

            buf.append(")")

        buf: t.List[str] = []
        _dump(self)
        return "".join(buf)
class Stmt(Node):
    """Base node for all statements."""

    abstract = True


class Helper(Node):
    """Nodes that exist in a specific context only."""

    abstract = True


class Template(Node):
    """Node that represents a template. This must be the outermost node that
    is passed to the compiler.
    """

    fields = ("body",)
    body: t.List[Node]


class Output(Stmt):
    """A node that holds multiple expressions which are then printed out.
    This is used both for the `print` statement and the regular template data.
    """

    fields = ("nodes",)
    nodes: t.List["Expr"]


class Extends(Stmt):
    """Represents an extends statement."""

    fields = ("template",)
    template: "Expr"


class For(Stmt):
    """The for loop. `target` is the target for the iteration (usually a
    :class:`Name` or :class:`Tuple`), `iter` the iterable. `body` is a list
    of nodes that are used as loop-body, and `else_` a list of nodes for the
    `else` block. If no else node exists it has to be an empty list.

    For filtered nodes an expression can be stored as `test`, otherwise `None`.
    """

    fields = ("target", "iter", "body", "else_", "test", "recursive")
    target: Node
    iter: Node
    body: t.List[Node]
    else_: t.List[Node]
    test: t.Optional[Node]
    recursive: bool


class If(Stmt):
    """If `test` is true, `body` is rendered, else `else_`."""

    fields = ("test", "body", "elif_", "else_")
    test: Node
    body: t.List[Node]
    elif_: t.List["If"]
    else_: t.List[Node]
class Macro(Stmt):
    """A macro definition. `name` is the name of the macro, `args` a list of
    arguments and `defaults` a list of defaults if there are any. `body` is
    a list of nodes for the macro body.
    """

    fields = ("name", "args", "defaults", "body")
    name: str
    args: t.List["Name"]
    defaults: t.List["Expr"]
    body: t.List[Node]


class CallBlock(Stmt):
    """Like a macro without a name but a call instead. `call` is called with
    the unnamed macro as `caller` argument this node holds.
    """

    fields = ("call", "args", "defaults", "body")
    call: "Call"
    args: t.List["Name"]
    defaults: t.List["Expr"]
    body: t.List[Node]


class FilterBlock(Stmt):
    """Node for filter sections."""

    fields = ("body", "filter")
    body: t.List[Node]
    filter: "Filter"


class With(Stmt):
    """Specific node for with statements. In older versions of Jinja the
    with statement was implemented on the base of the `Scope` node instead.

    .. versionadded:: 2.9.3
    """

    fields = ("targets", "values", "body")
    targets: t.List["Expr"]
    values: t.List["Expr"]
    body: t.List[Node]


class Block(Stmt):
    """A node that represents a block.

    .. versionchanged:: 3.0.0
        the `required` field was added.
    """

    fields = ("name", "body", "scoped", "required")
    name: str
    body: t.List[Node]
    scoped: bool
    required: bool
class Include(Stmt):
    """A node that represents the include tag."""

    fields = ("template", "with_context", "ignore_missing")
    template: "Expr"
    with_context: bool
    ignore_missing: bool


class Import(Stmt):
    """A node that represents the import tag."""

    fields = ("template", "target", "with_context")
    template: "Expr"
    target: str
    with_context: bool


class FromImport(Stmt):
    """A node that represents the from import tag. It's important to not
    pass unsafe names to the name attribute. The compiler translates the
    attribute lookups directly into getattr calls and does *not* use the
    subscript callback of the interface. As exported variables may not
    start with double underscores (which the parser asserts) this is not a
    problem for regular Jinja code, but if this node is used in an extension
    extra care must be taken.

    The list of names may contain tuples if aliases are wanted.
    """

    fields = ("template", "names", "with_context")
    template: "Expr"
    names: t.List[t.Union[str, t.Tuple[str, str]]]
    with_context: bool


class ExprStmt(Stmt):
    """A statement that evaluates an expression and discards the result."""

    fields = ("node",)
    node: Node


class Assign(Stmt):
    """Assigns an expression to a target."""

    fields = ("target", "node")
    target: "Expr"
    node: Node


class AssignBlock(Stmt):
    """Assigns a block to a target."""

    fields = ("target", "filter", "body")
    target: "Expr"
    filter: t.Optional["Filter"]
    body: t.List[Node]
class Expr(Node):
    """Baseclass for all expressions."""

    abstract = True

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Return the value of the expression as constant or raise
        :exc:`Impossible` if this was not possible.

        An :class:`EvalContext` can be provided, if none is given
        a default context is created which requires the nodes to have
        an attached environment.

        .. versionchanged:: 2.4
           the `eval_ctx` parameter was added.
        """
        # Expressions are not foldable unless a subclass says otherwise.
        raise Impossible()

    def can_assign(self) -> bool:
        """Check if it's possible to assign something to this node."""
        return False
class BinExpr(Expr):
    """Baseclass for all binary expressions."""

    fields = ("left", "right")
    left: Expr
    right: Expr
    operator: str
    abstract = True

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Constant-fold ``left <operator> right``.

        Raises :exc:`Impossible` for sandboxed environments that
        intercept this operator, or when folding either operand fails.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        # intercepted operators cannot be folded at compile time
        if (
            eval_ctx.environment.sandboxed
            and self.operator in eval_ctx.environment.intercepted_binops  # type: ignore
        ):
            raise Impossible()

        f = _binop_to_func[self.operator]

        try:
            return f(self.left.as_const(eval_ctx), self.right.as_const(eval_ctx))
        except Exception:
            raise Impossible()


class UnaryExpr(Expr):
    """Baseclass for all unary expressions."""

    fields = ("node",)
    node: Expr
    operator: str
    abstract = True

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Constant-fold ``<operator> node``.

        Raises :exc:`Impossible` for sandboxed environments that
        intercept this operator, or when folding the operand fails.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        # intercepted operators cannot be folded at compile time
        if (
            eval_ctx.environment.sandboxed
            and self.operator in eval_ctx.environment.intercepted_unops  # type: ignore
        ):
            raise Impossible()

        f = _uaop_to_func[self.operator]

        try:
            return f(self.node.as_const(eval_ctx))
        except Exception:
            raise Impossible()
class Name(Expr):
    """Looks up a name or stores a value in a name.
    The `ctx` of the node can be one of the following values:

    -   `store`: store a value in the name
    -   `load`: load that name
    -   `param`: like `store` but if the name was defined as function parameter.
    """

    fields = ("name", "ctx")
    name: str
    ctx: str

    def can_assign(self) -> bool:
        # The reserved constant names (both spellings) are never valid
        # assignment targets.
        return self.name not in {"true", "false", "none", "True", "False", "None"}


class NSRef(Expr):
    """Reference to a namespace value assignment"""

    fields = ("name", "attr")
    name: str
    attr: str

    def can_assign(self) -> bool:
        # We don't need any special checks here; NSRef assignments have a
        # runtime check to ensure the target is a namespace object which will
        # have been checked already as it is created using a normal assignment
        # which goes through a `Name` node.
        return True


class Literal(Expr):
    """Baseclass for literals."""

    abstract = True
class Const(Literal):
    """All constant values. The parser will return this node for simple
    constants such as ``42`` or ``"foo"`` but it can be used to store more
    complex values such as lists too. Only constants with a safe
    representation (objects where ``eval(repr(x)) == x`` is true).
    """

    fields = ("value",)
    value: t.Any

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        # A Const is already folded; just hand back the stored value.
        return self.value

    @classmethod
    def from_untrusted(
        cls,
        value: t.Any,
        lineno: t.Optional[int] = None,
        environment: "t.Optional[Environment]" = None,
    ) -> "Const":
        """Return a const object if the value is representable as
        constant value in the generated code, otherwise it will raise
        an `Impossible` exception.
        """
        from .compiler import has_safe_repr

        if not has_safe_repr(value):
            raise Impossible()

        return cls(value, lineno=lineno, environment=environment)


class TemplateData(Literal):
    """A constant template string."""

    fields = ("data",)
    data: str

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
        """Return the raw data, wrapped in ``Markup`` when autoescaping.

        Raises :exc:`Impossible` when the eval context is volatile, since
        the autoescape setting is then unknown at compile time.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if eval_ctx.volatile:
            raise Impossible()

        if eval_ctx.autoescape:
            return Markup(self.data)

        return self.data
class Tuple(Literal):
    """For loop unpacking and some other things like multiple arguments
    for subscripts. Like for :class:`Name` `ctx` specifies if the tuple
    is used for loading the names or storing.
    """

    fields = ("items", "ctx")
    items: t.List[Expr]
    ctx: str

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[t.Any, ...]:
        eval_ctx = get_eval_context(self, eval_ctx)
        return tuple(x.as_const(eval_ctx) for x in self.items)

    def can_assign(self) -> bool:
        # Assignable only if every element is itself assignable.
        for item in self.items:
            if not item.can_assign():
                return False

        return True


class List(Literal):
    """Any list literal such as ``[1, 2, 3]``"""

    fields = ("items",)
    items: t.List[Expr]

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.List[t.Any]:
        eval_ctx = get_eval_context(self, eval_ctx)
        return [x.as_const(eval_ctx) for x in self.items]


class Dict(Literal):
    """Any dict literal such as ``{1: 2, 3: 4}``. The items must be a list of
    :class:`Pair` nodes.
    """

    fields = ("items",)
    items: t.List["Pair"]

    def as_const(
        self, eval_ctx: t.Optional[EvalContext] = None
    ) -> t.Dict[t.Any, t.Any]:
        # Each Pair folds to a (key, value) tuple, which dict() consumes.
        eval_ctx = get_eval_context(self, eval_ctx)
        return dict(x.as_const(eval_ctx) for x in self.items)


class Pair(Helper):
    """A key, value pair for dicts."""

    fields = ("key", "value")
    key: Expr
    value: Expr

    def as_const(
        self, eval_ctx: t.Optional[EvalContext] = None
    ) -> t.Tuple[t.Any, t.Any]:
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.key.as_const(eval_ctx), self.value.as_const(eval_ctx)


class Keyword(Helper):
    """A key, value pair for keyword arguments where key is a string."""

    fields = ("key", "value")
    key: str
    value: Expr

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Tuple[str, t.Any]:
        # The key is a plain string, so only the value needs folding.
        eval_ctx = get_eval_context(self, eval_ctx)
        return self.key, self.value.as_const(eval_ctx)
class CondExpr(Expr):
    """A conditional expression (inline if expression). (``{{
    foo if bar else baz }}``)
    """

    fields = ("test", "expr1", "expr2")
    test: Expr
    expr1: Expr
    expr2: t.Optional[Expr]

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Fold the conditional; short-circuits like Python's ternary.

        Raises :exc:`Impossible` when the ``else`` branch is missing but
        would be taken, since that yields an undefined object at runtime.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if self.test.as_const(eval_ctx):
            return self.expr1.as_const(eval_ctx)

        # if we evaluate to an undefined object, we better do that at runtime
        if self.expr2 is None:
            raise Impossible()

        return self.expr2.as_const(eval_ctx)
def args_as_const(
    node: t.Union["_FilterTestCommon", "Call"], eval_ctx: t.Optional[EvalContext]
) -> t.Tuple[t.List[t.Any], t.Dict[t.Any, t.Any]]:
    """Constant-fold the argument lists of a call-like node.

    Returns ``(args, kwargs)`` with every argument node reduced via
    ``as_const``.  ``dyn_args``/``dyn_kwargs`` (i.e. ``*args`` /
    ``**kwargs``) are folded and merged in as well; :exc:`Impossible`
    is raised when a dynamic argument cannot be folded or does not
    behave like an iterable/mapping.
    """
    positional: t.List[t.Any] = []
    for arg_node in node.args:
        positional.append(arg_node.as_const(eval_ctx))

    keyword: t.Dict[t.Any, t.Any] = {}
    for kw_node in node.kwargs:
        key, value = kw_node.as_const(eval_ctx)
        keyword[key] = value

    if node.dyn_args is not None:
        try:
            positional.extend(node.dyn_args.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    if node.dyn_kwargs is not None:
        try:
            keyword.update(node.dyn_kwargs.as_const(eval_ctx))
        except Exception:
            raise Impossible()

    return positional, keyword
class _FilterTestCommon(Expr):
    """Shared constant-folding implementation for :class:`Filter` and
    :class:`Test`, which differ only in which environment mapping the
    callable is looked up in.
    """

    fields = ("node", "name", "args", "kwargs", "dyn_args", "dyn_kwargs")
    node: Expr
    name: str
    args: t.List[Expr]
    kwargs: t.List[Pair]
    dyn_args: t.Optional[Expr]
    dyn_kwargs: t.Optional[Expr]
    abstract = True
    # Filter keeps this True; Test flips it to look up environment.tests.
    _is_filter = True

    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Apply the filter or test at compile time if possible.

        Raises :exc:`Impossible` for volatile contexts, unknown names,
        ``@pass_context`` callables, async variants, and any error while
        calling the callable.
        """
        eval_ctx = get_eval_context(self, eval_ctx)

        if eval_ctx.volatile:
            raise Impossible()

        if self._is_filter:
            env_map = eval_ctx.environment.filters
        else:
            env_map = eval_ctx.environment.tests

        func = env_map.get(self.name)
        pass_arg = _PassArg.from_obj(func)  # type: ignore

        # Callables that receive the runtime context can never be folded.
        if func is None or pass_arg is _PassArg.context:
            raise Impossible()

        if eval_ctx.environment.is_async and (
            getattr(func, "jinja_async_variant", False) is True
            or inspect.iscoroutinefunction(func)
        ):
            raise Impossible()

        args, kwargs = args_as_const(self, eval_ctx)
        # The filtered/tested value is always the first positional argument.
        args.insert(0, self.node.as_const(eval_ctx))

        if pass_arg is _PassArg.eval_context:
            args.insert(0, eval_ctx)
        elif pass_arg is _PassArg.environment:
            args.insert(0, eval_ctx.environment)

        try:
            return func(*args, **kwargs)
        except Exception:
            raise Impossible()
class Filter(_FilterTestCommon):
    """Apply a filter to an expression. ``name`` is the name of the
    filter, the other fields are the same as :class:`Call`.
    If ``node`` is ``None``, the filter is being used in a filter block
    and is applied to the content of the block.
    """
    node: t.Optional[Expr] # type: ignore
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        # A filter block (node is None) operates on block content that only
        # exists at render time, so it can never be constant-folded.
        if self.node is None:
            raise Impossible()
        return super().as_const(eval_ctx=eval_ctx)
class Test(_FilterTestCommon):
    """Apply a test to an expression. ``name`` is the name of the test,
    the other field are the same as :class:`Call`.
    .. versionchanged:: 3.0
        ``as_const`` shares the same logic for filters and tests. Tests
        check for volatile, async, and ``@pass_context`` etc.
        decorators.
    """
    # Looked up in environment.tests instead of environment.filters.
    _is_filter = False
class Call(Expr):
    """Calls an expression. `args` is a list of arguments, `kwargs` a list
    of keyword arguments (list of :class:`Keyword` nodes), and `dyn_args`
    and `dyn_kwargs` has to be either `None` or a node that is used as
    node for dynamic positional (``*args``) or keyword (``**kwargs``)
    arguments.
    """
    fields = ("node", "args", "kwargs", "dyn_args", "dyn_kwargs")
    node: Expr  # the callable being invoked
    args: t.List[Expr]  # positional arguments
    kwargs: t.List[Keyword]  # explicit keyword arguments
    dyn_args: t.Optional[Expr]  # ``*args`` expression, if any
    dyn_kwargs: t.Optional[Expr]  # ``**kwargs`` expression, if any
class Getitem(Expr):
    """Get an attribute or item from an expression and prefer the item."""
    fields = ("node", "arg", "ctx")
    node: Expr
    arg: Expr
    ctx: str
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Fold a subscript lookup through the environment's getitem."""
        # Only plain loads can be folded; stores and deletes are runtime-only.
        if self.ctx != "load":
            raise Impossible()
        eval_ctx = get_eval_context(self, eval_ctx)
        try:
            target = self.node.as_const(eval_ctx)
            subscript = self.arg.as_const(eval_ctx)
            return eval_ctx.environment.getitem(target, subscript)
        except Exception:
            raise Impossible()
class Getattr(Expr):
    """Get an attribute or item from an expression that is an ascii-only
    bytestring and prefer the attribute.
    """
    fields = ("node", "attr", "ctx")
    node: Expr
    attr: str
    ctx: str
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Fold an attribute lookup through the environment's getattr."""
        # Only plain loads can be folded; stores and deletes are runtime-only.
        if self.ctx != "load":
            raise Impossible()
        eval_ctx = get_eval_context(self, eval_ctx)
        try:
            target = self.node.as_const(eval_ctx)
            return eval_ctx.environment.getattr(target, self.attr)
        except Exception:
            raise Impossible()
class Slice(Expr):
    """Represents a slice object. This must only be used as argument for
    :class:`Subscript`.
    """
    fields = ("start", "stop", "step")
    start: t.Optional[Expr]
    stop: t.Optional[Expr]
    step: t.Optional[Expr]
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> slice:
        """Build a :class:`slice` from the folded bounds."""
        eval_ctx = get_eval_context(self, eval_ctx)
        # Fold each bound, preserving None for omitted bounds.
        bounds = [
            part if part is None else part.as_const(eval_ctx)
            for part in (self.start, self.stop, self.step)
        ]
        return slice(*bounds)
class Concat(Expr):
    """Concatenates the list of expressions provided after converting
    them to strings.
    """
    fields = ("nodes",)
    nodes: t.List[Expr]
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> str:
        """Fold every child and join the stringified results."""
        eval_ctx = get_eval_context(self, eval_ctx)
        pieces = [str(child.as_const(eval_ctx)) for child in self.nodes]
        return "".join(pieces)
class Compare(Expr):
    """Compares an expression with some other expressions. `ops` must be a
    list of :class:`Operand`\\s.
    """
    fields = ("expr", "ops")
    expr: Expr
    ops: t.List["Operand"]
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        # Evaluates like a Python chained comparison (a < b < c): each
        # operand is compared against the previous *value*, short-circuiting
        # to False as soon as one comparison fails.
        eval_ctx = get_eval_context(self, eval_ctx)
        result = value = self.expr.as_const(eval_ctx)
        try:
            for op in self.ops:
                new_value = op.expr.as_const(eval_ctx)
                result = _cmpop_to_func[op.op](value, new_value)
                if not result:
                    return False
                value = new_value
        except Exception:
            # Any failure (unknown operator, uncomparable operands) means the
            # node cannot be folded at compile time.
            raise Impossible()
        return result
class Operand(Helper):
    """Holds an operator and an expression."""
    fields = ("op", "expr")
    op: str  # comparison operator name, used as key into _cmpop_to_func
    expr: Expr  # right-hand expression of this comparison step
class Mul(BinExpr):
    """Multiplies the left with the right node."""
    # Source-level symbol for this binary operation.
    operator = "*"
class Div(BinExpr):
    """Divides the left by the right node."""
    # True division; see FloorDiv for ``//``.
    operator = "/"
class FloorDiv(BinExpr):
    """Divides the left by the right node and truncates the result to
    an integer (floor division, ``//``).
    """
    operator = "//"
class Add(BinExpr):
    """Add the left to the right node."""
    # Source-level symbol for this binary operation.
    operator = "+"
class Sub(BinExpr):
    """Subtract the right from the left node."""
    # Source-level symbol for this binary operation.
    operator = "-"
class Mod(BinExpr):
    """Left modulo right."""
    # Source-level symbol for this binary operation.
    operator = "%"
class Pow(BinExpr):
    """Left to the power of right."""
    # Source-level symbol for this binary operation.
    operator = "**"
class And(BinExpr):
    """Short circuited AND."""
    operator = "and"
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Fold with Python ``and`` semantics: return the first falsy
        operand, otherwise the right operand."""
        eval_ctx = get_eval_context(self, eval_ctx)
        left = self.left.as_const(eval_ctx)
        if not left:
            return left
        return self.right.as_const(eval_ctx)
class Or(BinExpr):
    """Short circuited OR."""
    operator = "or"
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> t.Any:
        """Fold with Python ``or`` semantics: return the first truthy
        operand, otherwise the right operand."""
        eval_ctx = get_eval_context(self, eval_ctx)
        left = self.left.as_const(eval_ctx)
        if left:
            return left
        return self.right.as_const(eval_ctx)
class Not(UnaryExpr):
    """Negate the expression."""
    # Source-level symbol for this unary operation.
    operator = "not"
class Neg(UnaryExpr):
    """Make the expression negative."""
    # Source-level symbol for this unary operation.
    operator = "-"
class Pos(UnaryExpr):
    """Make the expression positive (noop for most expressions)"""
    # Source-level symbol for this unary operation.
    operator = "+"
# Helpers for extensions
class EnvironmentAttribute(Expr):
    """Loads an attribute from the environment object. This is useful for
    extensions that want to call a callback stored on the environment.
    """
    fields = ("name",)
    name: str  # attribute name looked up on the environment
class ExtensionAttribute(Expr):
    """Returns the attribute of an extension bound to the environment.
    The identifier is the identifier of the :class:`Extension`.
    This node is usually constructed by calling the
    :meth:`~jinja2.ext.Extension.attr` method on an extension.
    """
    fields = ("identifier", "name")
    identifier: str  # identifier of the Extension instance
    name: str  # attribute name looked up on that extension
class ImportedName(Expr):
    """If created with an import name the import name is returned on node
    access. For example ``ImportedName('cgi.escape')`` returns the `escape`
    function from the cgi module on evaluation. Imports are optimized by the
    compiler so there is no need to assign them to local variables.
    """
    fields = ("importname",)
    importname: str  # dotted import path, e.g. "cgi.escape"
class InternalName(Expr):
    """An internal name in the compiler. You cannot create these nodes
    yourself but the parser provides a
    :meth:`~jinja2.parser.Parser.free_identifier` method that creates
    a new identifier for you. This identifier is not available from the
    template and is not treated specially by the compiler.
    """
    fields = ("name",)
    name: str
    def __init__(self) -> None:
        # Direct construction is forbidden; the parser's free_identifier
        # factory is the only supported way to obtain these nodes.
        raise TypeError(
            "Can't create internal names. Use the "
            "`free_identifier` method on a parser."
        )
class MarkSafe(Expr):
    """Mark the wrapped expression as safe (wrap it as `Markup`)."""
    fields = ("expr",)
    expr: Expr
    def as_const(self, eval_ctx: t.Optional[EvalContext] = None) -> Markup:
        # Unconditionally wraps the folded value; compare
        # MarkSafeIfAutoescape, which only wraps when autoescaping is on.
        eval_ctx = get_eval_context(self, eval_ctx)
        return Markup(self.expr.as_const(eval_ctx))
class MarkSafeIfAutoescape(Expr):
    """Mark the wrapped expression as safe (wrap it as `Markup`) but
    only if autoescaping is active.
    .. versionadded:: 2.5
    """
    fields = ("expr",)
    expr: Expr
    def as_const(
        self, eval_ctx: t.Optional[EvalContext] = None
    ) -> t.Union[Markup, t.Any]:
        """Fold the child; wrap in Markup only when autoescaping is on."""
        eval_ctx = get_eval_context(self, eval_ctx)
        # A volatile context means autoescaping may change at runtime.
        if eval_ctx.volatile:
            raise Impossible()
        value = self.expr.as_const(eval_ctx)
        return Markup(value) if eval_ctx.autoescape else value
class ContextReference(Expr):
    """Returns the current template context. It can be used like a
    :class:`Name` node, with a ``'load'`` ctx and will return the
    current :class:`~jinja2.runtime.Context` object.
    Here an example that assigns the current template name to a
    variable named `foo`::
        Assign(Name('foo', ctx='store'),
               Getattr(ContextReference(), 'name'))
    This is basically equivalent to using the
    :func:`~jinja2.pass_context` decorator when using the high-level
    API, which causes a reference to the context to be passed as the
    first argument to a function.
    """
    # Marker node only: carries no fields of its own.
class DerivedContextReference(Expr):
    """Return the current template context including locals. Behaves
    exactly like :class:`ContextReference`, but includes local
    variables, such as from a ``for`` loop.
    .. versionadded:: 2.11
    """
    # Marker node only: carries no fields of its own.
class Continue(Stmt):
    """Continue a loop."""
    # Marker statement: no fields.
class Break(Stmt):
    """Break a loop."""
    # Marker statement: no fields.
class Scope(Stmt):
    """An artificial scope."""
    fields = ("body",)
    body: t.List[Node]  # statements executed inside the artificial scope
class OverlayScope(Stmt):
    """An overlay scope for extensions. This is a largely unoptimized scope
    that however can be used to introduce completely arbitrary variables into
    a sub scope from a dictionary or dictionary like object. The `context`
    field has to evaluate to a dictionary object.
    Example usage::
        OverlayScope(context=self.call_method('get_context'),
                     body=[...])
    .. versionadded:: 2.10
    """
    fields = ("context", "body")
    context: Expr  # must evaluate to a dict-like object at runtime
    body: t.List[Node]  # statements executed with the overlay in effect
class EvalContextModifier(Stmt):
    """Modifies the eval context. For each option that should be modified,
    a :class:`Keyword` has to be added to the :attr:`options` list.
    Example to change the `autoescape` setting::
        EvalContextModifier(options=[Keyword('autoescape', Const(True))])
    """
    fields = ("options",)
    options: t.List[Keyword]  # each Keyword sets one eval-context option
class ScopedEvalContextModifier(EvalContextModifier):
    """Modifies the eval context and reverts it later. Works exactly like
    :class:`EvalContextModifier` but will only modify the
    :class:`~jinja2.nodes.EvalContext` for nodes in the :attr:`body`.
    """
    fields = ("body",)
    body: t.List[Node]  # nodes affected by the temporary modification
# make sure nobody creates custom nodes
def _failing_new(*args: t.Any, **kwargs: t.Any) -> "te.NoReturn":
    """Stand-in for ``NodeType.__new__`` that rejects any new node class."""
    raise TypeError("can't create custom node types")
NodeType.__new__ = staticmethod(_failing_new)  # type: ignore
# Drop the module-level reference; only the bound staticmethod remains.
del _failing_new
| {
"repo_name": "pallets/jinja",
"path": "src/jinja2/nodes.py",
"copies": "1",
"size": "34509",
"license": "bsd-3-clause",
"hash": 7240030929555247000,
"line_mean": 27.6381742739,
"line_max": 88,
"alpha_frac": 0.6006549016,
"autogenerated": false,
"ratio": 3.8467283468955524,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.49473832484955527,
"avg_score": null,
"num_lines": null
} |
"""utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes"""
from mako import exceptions, pyparser, util
import re
class PythonCode(object):
    """represents information about a string containing Python code"""
    def __init__(self, code, **exception_kwargs):
        # code may be a string of Python source or an already-parsed AST node.
        self.code = code
        # represents all identifiers which are assigned to at some point in the code
        self.declared_identifiers = util.Set()
        # represents all identifiers which are referenced before their assignment, if any
        self.undeclared_identifiers = util.Set()
        # note that an identifier can be in both the undeclared and declared lists.
        # using AST to parse instead of using code.co_varnames, code.co_names has several advantages:
        # - we can locate an identifier as "undeclared" even if its declared later in the same block of code
        # - AST is less likely to break with version changes (for example, the behavior of co_names changed a little bit
        # in python version 2.5)
        if isinstance(code, basestring):  # Python 2 API: accepts str or unicode
            expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
        else:
            expr = code
        # FindIdentifiers is expected to populate the two sets above as it
        # walks the tree (defined in mako.pyparser — confirm there).
        f = pyparser.FindIdentifiers(self, **exception_kwargs)
        f.visit(expr)
class ArgumentList(object):
    """parses a fragment of code as a comma-separated list of expressions"""
    def __init__(self, code, **exception_kwargs):
        # per-argument PythonCode objects and their source text, in order
        self.codeargs = []
        self.args = []
        self.declared_identifiers = util.Set()
        self.undeclared_identifiers = util.Set()
        if isinstance(code, basestring):
            if re.match(r"\S", code) and not re.match(r",\s*$", code):
                # if there's text and no trailing comma, ensure it's parsed
                # as a tuple by adding a trailing comma
                code += ","
            expr = pyparser.parse(code, "exec", **exception_kwargs)
        else:
            expr = code
        # FindTuple is expected to fill codeargs/args and the identifier sets
        # from the tuple elements (defined in mako.pyparser — confirm there).
        f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
        f.visit(expr)
class PythonFragment(PythonCode):
    """extends PythonCode to provide identifier lookups in partial control statements
    e.g.
        for x in 5:
        elif y==9:
        except (MyException, e):
    etc.
    """
    def __init__(self, code, **exception_kwargs):
        # Match "<keyword> [expression]:" optionally followed by a comment
        # or end-of-string; group(3) marks trailing text to strip.
        m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
        if not m:
            raise exceptions.CompileException("Fragment '%s' is not a partial control statement" % code, **exception_kwargs)
        if m.group(3):
            code = code[:m.start(3)]
        (keyword, expr) = m.group(1,2)
        # Pad the fragment into a complete, parseable statement so the base
        # class can run identifier analysis on it.
        if keyword in ['for','if', 'while']:
            code = code + "pass"
        elif keyword == 'try':
            code = code + "pass\nexcept:pass"
        elif keyword == 'elif' or keyword == 'else':
            code = "if False:pass\n" + code + "pass"
        elif keyword == 'except':
            code = "try:pass\n" + code + "pass"
        else:
            raise exceptions.CompileException("Unsupported control keyword: '%s'" % keyword, **exception_kwargs)
        super(PythonFragment, self).__init__(code, **exception_kwargs)
class FunctionDecl(object):
    """function declaration"""
    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
        """Parse *code* as a function declaration and record its signature.

        Raises CompileException when the code is not a function declaration,
        or when it uses ``**kwargs`` while allow_kwargs is False.
        """
        self.code = code
        expr = pyparser.parse(code, "exec", **exception_kwargs)
        # ParseFunc is expected to set funcname/argnames/defaults/varargs/
        # kwargs attributes on self while visiting the parsed tree.
        parse_visitor = pyparser.ParseFunc(self, **exception_kwargs)
        parse_visitor.visit(expr)
        if not hasattr(self, 'funcname'):
            raise exceptions.CompileException("Code '%s' is not a function declaration" % code, **exception_kwargs)
        if not allow_kwargs and self.kwargs:
            raise exceptions.CompileException("'**%s' keyword argument not allowed here" % self.argnames[-1], **exception_kwargs)
    def get_argument_expressions(self, include_defaults=True):
        """return the argument declarations of this FunctionDecl as a printable list."""
        remaining_defaults = list(self.defaults)
        pending_kwargs = self.kwargs
        pending_varargs = self.varargs
        decls = []
        # Walk the names from the right so that defaults, which align with
        # the trailing positional arguments, can be popped off the end.
        for name in reversed(self.argnames):
            default = None
            if pending_kwargs:
                name = "**" + name
                pending_kwargs = False
            elif pending_varargs:
                name = "*" + name
                pending_varargs = False
            else:
                default = len(remaining_defaults) and remaining_defaults.pop() or None
            if include_defaults and default:
                decls.insert(0, "%s=%s" % (name, pyparser.ExpressionGenerator(default).value()))
            else:
                decls.insert(0, name)
        return decls
class FunctionArgs(FunctionDecl):
    """the argument portion of a function declaration"""
    def __init__(self, code, **kwargs):
        # Wrap the bare argument list in a dummy "def" so the parent class
        # can parse it as a complete function declaration.
        super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, **kwargs)
| {
"repo_name": "bdoms/mako",
"path": "ast.py",
"copies": "2",
"size": "5251",
"license": "mit",
"hash": -8948120892019975000,
"line_mean": 41.008,
"line_max": 129,
"alpha_frac": 0.5945534184,
"autogenerated": false,
"ratio": 4.217670682730923,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011831266588945363,
"num_lines": 125
} |
"""utilities for analyzing expressions and blocks of Python
code, as well as generating Python from AST nodes"""
from mako import exceptions, pyparser, util
import re
class PythonCode(object):
    """represents information about a string containing Python code"""
    def __init__(self, code, **exception_kwargs):
        # code may be a string of Python source or an already-parsed AST node.
        self.code = code
        # represents all identifiers which are assigned to at some point in the code
        self.declared_identifiers = set()
        # represents all identifiers which are referenced before their assignment, if any
        self.undeclared_identifiers = set()
        # note that an identifier can be in both the undeclared and declared lists.
        # using AST to parse instead of using code.co_varnames,
        # code.co_names has several advantages:
        # - we can locate an identifier as "undeclared" even if
        # its declared later in the same block of code
        # - AST is less likely to break with version changes
        # (for example, the behavior of co_names changed a little bit
        # in python version 2.5)
        if isinstance(code, basestring):  # Python 2 API: accepts str or unicode
            expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor populates the two identifier sets above.
        f = pyparser.FindIdentifiers(self, **exception_kwargs)
        f.visit(expr)
class ArgumentList(object):
    """parses a fragment of code as a comma-separated list of expressions"""
    def __init__(self, code, **exception_kwargs):
        # per-argument PythonCode objects and their source text, in order
        self.codeargs = []
        self.args = []
        self.declared_identifiers = set()
        self.undeclared_identifiers = set()
        if isinstance(code, basestring):
            if re.match(r"\S", code) and not re.match(r",\s*$", code):
                # if there's text and no trailing comma, ensure it's parsed
                # as a tuple by adding a trailing comma
                code += ","
            expr = pyparser.parse(code, "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor fills codeargs/args and the identifier sets.
        f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
        f.visit(expr)
class PythonFragment(PythonCode):
    """extends PythonCode to provide identifier lookups in partial control statements
    e.g.
        for x in 5:
        elif y==9:
        except (MyException, e):
    etc.
    """
    def __init__(self, code, **exception_kwargs):
        # Match "<keyword> [expression]:" optionally followed by a comment
        # or end-of-string; group(3) marks trailing text to strip.
        m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
        if not m:
            raise exceptions.CompileException(
                "Fragment '%s' is not a partial control statement" %
                code, **exception_kwargs)
        if m.group(3):
            code = code[:m.start(3)]
        (keyword, expr) = m.group(1,2)
        # Pad the fragment into a complete, parseable statement so the base
        # class can run identifier analysis on it.
        if keyword in ['for','if', 'while']:
            code = code + "pass"
        elif keyword == 'try':
            code = code + "pass\nexcept:pass"
        elif keyword == 'elif' or keyword == 'else':
            code = "if False:pass\n" + code + "pass"
        elif keyword == 'except':
            code = "try:pass\n" + code + "pass"
        else:
            raise exceptions.CompileException(
                "Unsupported control keyword: '%s'" %
                keyword, **exception_kwargs)
        super(PythonFragment, self).__init__(code, **exception_kwargs)
class FunctionDecl(object):
    """function declaration"""
    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
        # Parse the declaration; the visitor sets funcname/argnames/defaults/
        # varargs/kwargs attributes on self while walking the tree.
        self.code = code
        expr = pyparser.parse(code, "exec", **exception_kwargs)
        f = pyparser.ParseFunc(self, **exception_kwargs)
        f.visit(expr)
        if not hasattr(self, 'funcname'):
            raise exceptions.CompileException(
                    "Code '%s' is not a function declaration" % code,
                    **exception_kwargs)
        if not allow_kwargs and self.kwargs:
            raise exceptions.CompileException(
                    "'**%s' keyword argument not allowed here" %
                    self.argnames[-1], **exception_kwargs)
    def get_argument_expressions(self, include_defaults=True):
        """return the argument declarations of this FunctionDecl as a printable list."""
        namedecls = []
        defaults = [d for d in self.defaults]
        kwargs = self.kwargs
        varargs = self.varargs
        argnames = [f for f in self.argnames]
        argnames.reverse()
        # Walk names right-to-left so defaults, which align with the trailing
        # positional arguments, can be popped off the end as we go.
        for arg in argnames:
            default = None
            if kwargs:
                arg = "**" + arg
                kwargs = False
            elif varargs:
                arg = "*" + arg
                varargs = False
            else:
                # pop a default if any remain; a falsy default degrades to None
                default = len(defaults) and defaults.pop() or None
            if include_defaults and default:
                namedecls.insert(0, "%s=%s" %
                                    (arg,
                                    pyparser.ExpressionGenerator(default).value()
                                    )
                                )
            else:
                namedecls.insert(0, arg)
        return namedecls
class FunctionArgs(FunctionDecl):
    """the argument portion of a function declaration"""
    def __init__(self, code, **kwargs):
        # Wrap the bare argument list in a dummy "def" so the parent class
        # can parse it as a complete function declaration.
        super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, **kwargs)
| {
"repo_name": "farseerfc/jgments",
"path": "lib/Mako-0.3.4/mako/ast.py",
"copies": "21",
"size": "5646",
"license": "bsd-2-clause",
"hash": -7867280888822451000,
"line_mean": 38.4825174825,
"line_max": 89,
"alpha_frac": 0.5501239816,
"autogenerated": false,
"ratio": 4.463241106719368,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.013725965154779435,
"num_lines": 143
} |
"""utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes"""
from mako import exceptions, pyparser, util
import re
class PythonCode(object):
    """represents information about a string containing Python code"""
    def __init__(self, code, **exception_kwargs):
        # code may be a string of Python source or an already-parsed AST node.
        self.code = code
        # represents all identifiers which are assigned to at some point in the code
        self.declared_identifiers = util.Set()
        # represents all identifiers which are referenced before their assignment, if any
        self.undeclared_identifiers = util.Set()
        # note that an identifier can be in both the undeclared and declared lists.
        # using AST to parse instead of using code.co_varnames, code.co_names has several advantages:
        # - we can locate an identifier as "undeclared" even if its declared later in the same block of code
        # - AST is less likely to break with version changes (for example, the behavior of co_names changed a little bit
        # in python version 2.5)
        if isinstance(code, basestring):  # Python 2 API: accepts str or unicode
            expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor populates the two identifier sets above.
        f = pyparser.FindIdentifiers(self, **exception_kwargs)
        f.visit(expr)
class ArgumentList(object):
    """parses a fragment of code as a comma-separated list of expressions"""
    def __init__(self, code, **exception_kwargs):
        # per-argument PythonCode objects and their source text, in order
        self.codeargs = []
        self.args = []
        self.declared_identifiers = util.Set()
        self.undeclared_identifiers = util.Set()
        if isinstance(code, basestring):
            if re.match(r"\S", code) and not re.match(r",\s*$", code):
                # if there's text and no trailing comma, ensure it's parsed
                # as a tuple by adding a trailing comma
                code += ","
            expr = pyparser.parse(code, "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor fills codeargs/args and the identifier sets.
        f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
        f.visit(expr)
class PythonFragment(PythonCode):
    """extends PythonCode to provide identifier lookups in partial control statements
    e.g.
        for x in 5:
        elif y==9:
        except (MyException, e):
    etc.
    """
    def __init__(self, code, **exception_kwargs):
        # Match "<keyword> [expression]:" optionally followed by a comment
        # or end-of-string; group(3) marks trailing text to strip.
        m = re.match(r'^(\w+)(?:\s+(.*?))?:\s*(#|$)', code.strip(), re.S)
        if not m:
            raise exceptions.CompileException("Fragment '%s' is not a partial control statement" % code, **exception_kwargs)
        if m.group(3):
            code = code[:m.start(3)]
        (keyword, expr) = m.group(1,2)
        # Pad the fragment into a complete, parseable statement so the base
        # class can run identifier analysis on it.
        if keyword in ['for','if', 'while']:
            code = code + "pass"
        elif keyword == 'try':
            code = code + "pass\nexcept:pass"
        elif keyword == 'elif' or keyword == 'else':
            code = "if False:pass\n" + code + "pass"
        elif keyword == 'except':
            code = "try:pass\n" + code + "pass"
        else:
            raise exceptions.CompileException("Unsupported control keyword: '%s'" % keyword, **exception_kwargs)
        super(PythonFragment, self).__init__(code, **exception_kwargs)
class FunctionDecl(object):
    """function declaration"""
    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
        # Parse the declaration; the visitor sets funcname/argnames/defaults/
        # varargs/kwargs attributes on self while walking the tree.
        self.code = code
        expr = pyparser.parse(code, "exec", **exception_kwargs)
        f = pyparser.ParseFunc(self, **exception_kwargs)
        f.visit(expr)
        if not hasattr(self, 'funcname'):
            raise exceptions.CompileException("Code '%s' is not a function declaration" % code, **exception_kwargs)
        if not allow_kwargs and self.kwargs:
            raise exceptions.CompileException("'**%s' keyword argument not allowed here" % self.argnames[-1], **exception_kwargs)
    def get_argument_expressions(self, include_defaults=True):
        """return the argument declarations of this FunctionDecl as a printable list."""
        namedecls = []
        defaults = [d for d in self.defaults]
        kwargs = self.kwargs
        varargs = self.varargs
        argnames = [f for f in self.argnames]
        argnames.reverse()
        # Walk names right-to-left so defaults, which align with the trailing
        # positional arguments, can be popped off the end as we go.
        for arg in argnames:
            default = None
            if kwargs:
                arg = "**" + arg
                kwargs = False
            elif varargs:
                arg = "*" + arg
                varargs = False
            else:
                # pop a default if any remain; a falsy default degrades to None
                default = len(defaults) and defaults.pop() or None
            if include_defaults and default:
                namedecls.insert(0, "%s=%s" % (arg, pyparser.ExpressionGenerator(default).value()))
            else:
                namedecls.insert(0, arg)
        return namedecls
class FunctionArgs(FunctionDecl):
    """the argument portion of a function declaration"""
    def __init__(self, code, **kwargs):
        # Wrap the bare argument list in a dummy "def" so the parent class
        # can parse it as a complete function declaration.
        super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, **kwargs)
| {
"repo_name": "bhatfield/titanium_mobile",
"path": "support/common/mako/ast.py",
"copies": "46",
"size": "5239",
"license": "apache-2.0",
"hash": -3650610462567076400,
"line_mean": 40.912,
"line_max": 129,
"alpha_frac": 0.594388242,
"autogenerated": false,
"ratio": 4.221595487510073,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""utilities for analyzing expressions and blocks of Python code, as well as generating Python from AST nodes"""
from compiler import ast, visitor
from compiler import parse as compiler_parse
from mako import util, exceptions
from StringIO import StringIO
import re
def parse(code, mode, **exception_kwargs):
    """Parse code with the legacy stdlib ``compiler`` package, converting
    SyntaxError into a Mako SyntaxException that carries position info
    and a snippet of the offending source."""
    try:
        return compiler_parse(code, mode)
    except SyntaxError, e:
        raise exceptions.SyntaxException("(%s) %s (%s)" % (e.__class__.__name__, str(e), repr(code[0:50])), **exception_kwargs)
class PythonCode(object):
    """represents information about a string containing Python code"""
    def __init__(self, code, **exception_kwargs):
        # code may be a string of Python source or an already-parsed AST node.
        self.code = code
        # represents all identifiers which are assigned to at some point in the code
        self.declared_identifiers = util.Set()
        # represents all identifiers which are referenced before their assignment, if any
        self.undeclared_identifiers = util.Set()
        # note that an identifier can be in both the undeclared and declared lists.
        # using AST to parse instead of using code.co_varnames, code.co_names has several advantages:
        # - we can locate an identifier as "undeclared" even if its declared later in the same block of code
        # - AST is less likely to break with version changes (for example, the behavior of co_names changed a little bit
        # in python version 2.5)
        if isinstance(code, basestring):  # Python 2 API: accepts str or unicode
            expr = parse(code.lstrip(), "exec", **exception_kwargs)
        else:
            expr = code
        # Visitor defined inline so its methods can close over the outer
        # `self` (this PythonCode instance).  NOTE: methods deliberately mix
        # parameter names: `s` is the FindIdentifiers instance, while `self`
        # always refers to the enclosing PythonCode via the closure.
        class FindIdentifiers(object):
            def __init__(self):
                self.in_function = False
                self.local_ident_stack = {}
            def _add_declared(s, name):
                # record declarations only at module scope, not inside defs
                if not s.in_function:
                    self.declared_identifiers.add(name)
            def visitClass(self, node, *args):
                self._add_declared(node.name)
            def visitAssName(self, node, *args):
                self._add_declared(node.name)
            def visitAssign(self, node, *args):
                # flip around the visiting of Assign so the expression gets evaluated first,
                # in the case of a clause like "x=x+5" (x is undeclared)
                self.visit(node.expr, *args)
                for n in node.nodes:
                    self.visit(n, *args)
            def visitFunction(self,node, *args):
                self._add_declared(node.name)
                # push function state onto stack. dont log any
                # more identifiers as "declared" until outside of the function,
                # but keep logging identifiers as "undeclared".
                # track argument names in each function header so they arent counted as "undeclared"
                saved = {}
                inf = self.in_function
                self.in_function = True
                for arg in node.argnames:
                    if arg in self.local_ident_stack:
                        saved[arg] = True
                    else:
                        self.local_ident_stack[arg] = True
                for n in node.getChildNodes():
                    self.visit(n, *args)
                self.in_function = inf
                # pop only the argument names this frame introduced
                for arg in node.argnames:
                    if arg not in saved:
                        del self.local_ident_stack[arg]
            def visitFor(self, node, *args):
                # flip around visit
                self.visit(node.list, *args)
                self.visit(node.assign, *args)
                self.visit(node.body, *args)
            def visitName(s, node, *args):
                # NOTE(review): membership test against __builtins__ — this is
                # a dict in __main__ but a module elsewhere; confirm intent.
                if node.name not in __builtins__ and node.name not in self.declared_identifiers and node.name not in s.local_ident_stack:
                    self.undeclared_identifiers.add(node.name)
            def visitImport(self, node, *args):
                for (mod, alias) in node.names:
                    if alias is not None:
                        self._add_declared(alias)
                    else:
                        # "import a.b" declares only the top-level name "a"
                        self._add_declared(mod.split('.')[0])
            def visitFrom(self, node, *args):
                for (mod, alias) in node.names:
                    if alias is not None:
                        self._add_declared(alias)
                    else:
                        if mod == '*':
                            raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared.  Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **exception_kwargs)
                        self._add_declared(mod)
        f = FindIdentifiers()
        visitor.walk(expr, f) #, walker=walker())
class ArgumentList(object):
    """parses a fragment of code as a comma-separated list of expressions"""
    def __init__(self, code, **exception_kwargs):
        # per-argument PythonCode objects and their source text, in order
        self.codeargs = []
        self.args = []
        self.declared_identifiers = util.Set()
        self.undeclared_identifiers = util.Set()
        # Inline visitor: `s` is the FindTuple instance; `self` refers to the
        # enclosing ArgumentList via the closure.
        class FindTuple(object):
            def visitTuple(s, node, *args):
                for n in node.nodes:
                    p = PythonCode(n, **exception_kwargs)
                    self.codeargs.append(p)
                    self.args.append(ExpressionGenerator(n).value())
                    self.declared_identifiers = self.declared_identifiers.union(p.declared_identifiers)
                    self.undeclared_identifiers = self.undeclared_identifiers.union(p.undeclared_identifiers)
        if isinstance(code, basestring):
            if re.match(r"\S", code) and not re.match(r",\s*$", code):
                # if there's text and no trailing comma, ensure it's parsed
                # as a tuple by adding a trailing comma
                code += ","
            expr = parse(code, "exec", **exception_kwargs)
        else:
            expr = code
        f = FindTuple()
        visitor.walk(expr, f)
class PythonFragment(PythonCode):
    """extends PythonCode to provide identifier lookups in partial control statements
    e.g.
        for x in 5:
        elif y==9:
        except (MyException, e):
    etc.
    """
    def __init__(self, code, **exception_kwargs):
        # Match "<keyword> [expression]:" — unlike later Mako versions this
        # regex requires the colon at the very end (no trailing comment).
        m = re.match(r'^(\w+)(?:\s+(.*?))?:$', code.strip(), re.S)
        if not m:
            raise exceptions.CompileException("Fragment '%s' is not a partial control statement" % code, **exception_kwargs)
        (keyword, expr) = m.group(1,2)
        # Pad the fragment into a complete, parseable statement so the base
        # class can run identifier analysis on it.
        if keyword in ['for','if', 'while']:
            code = code + "pass"
        elif keyword == 'try':
            code = code + "pass\nexcept:pass"
        elif keyword == 'elif' or keyword == 'else':
            code = "if False:pass\n" + code + "pass"
        elif keyword == 'except':
            code = "try:pass\n" + code + "pass"
        else:
            raise exceptions.CompileException("Unsupported control keyword: '%s'" % keyword, **exception_kwargs)
        super(PythonFragment, self).__init__(code, **exception_kwargs)
class walker(visitor.ASTVisitor):
    """Debugging visitor: prints each node as it is dispatched.  Only used
    via the commented-out ``walker=walker()`` hooks in this module."""
    def dispatch(self, node, *args):
        print "Node:", str(node)
        #print "dir:", dir(node)
        return visitor.ASTVisitor.dispatch(self, node, *args)
class FunctionDecl(object):
    """function declaration"""
    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
        self.code = code
        expr = parse(code, "exec", **exception_kwargs)
        # Inline visitor: `s` is the ParseFunc instance; `self` refers to the
        # enclosing FunctionDecl via the closure, so the visit records the
        # signature directly onto this object.
        class ParseFunc(object):
            def visitFunction(s, node, *args):
                self.funcname = node.name
                self.argnames = node.argnames
                self.defaults = node.defaults
                self.varargs = node.varargs
                self.kwargs = node.kwargs
        f = ParseFunc()
        visitor.walk(expr, f)
        if not hasattr(self, 'funcname'):
            raise exceptions.CompileException("Code '%s' is not a function declaration" % code, **exception_kwargs)
        if not allow_kwargs and self.kwargs:
            raise exceptions.CompileException("'**%s' keyword argument not allowed here" % self.argnames[-1], **exception_kwargs)
    def get_argument_expressions(self, include_defaults=True):
        """return the argument declarations of this FunctionDecl as a printable list."""
        namedecls = []
        defaults = [d for d in self.defaults]
        kwargs = self.kwargs
        varargs = self.varargs
        argnames = [f for f in self.argnames]
        argnames.reverse()
        # Walk names right-to-left so defaults, which align with the trailing
        # positional arguments, can be popped off the end as we go.
        for arg in argnames:
            default = None
            if kwargs:
                arg = "**" + arg
                kwargs = False
            elif varargs:
                arg = "*" + arg
                varargs = False
            else:
                # pop a default if any remain; a falsy default degrades to None
                default = len(defaults) and defaults.pop() or None
            if include_defaults and default:
                namedecls.insert(0, "%s=%s" % (arg, ExpressionGenerator(default).value()))
            else:
                namedecls.insert(0, arg)
        return namedecls
class FunctionArgs(FunctionDecl):
    """the argument portion of a function declaration"""
    def __init__(self, code, **kwargs):
        # Wrap the bare argument list in a dummy "def" so the parent class
        # can parse it as a complete function declaration.
        super(FunctionArgs, self).__init__("def ANON(%s):pass" % code, **kwargs)
class ExpressionGenerator(object):
    """given an AST node, generates an equivalent literal Python expression.

    Operates on ``compiler``-module AST nodes.  The ``visitXYZ`` method
    names are dispatch keys used by ``compiler.visitor.walk`` and must not
    be renamed.  Output accumulates in a StringIO buffer; retrieve it with
    ``value()``.
    """
    def __init__(self, astnode):
        self.buf = StringIO()
        visitor.walk(astnode, self) #, walker=walker())
    def value(self):
        """Return the generated source as a string."""
        return self.buf.getvalue()
    def operator(self, op, node, *args):
        "Render a parenthesized binary operation."
        self.buf.write("(")
        self.visit(node.left, *args)
        self.buf.write(" %s " % op)
        self.visit(node.right, *args)
        self.buf.write(")")
    def booleanop(self, op, node, *args):
        "Render an n-ary boolean/bitwise operation."
        self.visit(node.nodes[0])
        for n in node.nodes[1:]:
            self.buf.write(" " + op + " ")
            self.visit(n, *args)
    def visitConst(self, node, *args):
        self.buf.write(repr(node.value))
    def visitAssName(self, node, *args):
        # TODO: figure out OP_ASSIGN, other OP_s
        self.buf.write(node.name)
    def visitName(self, node, *args):
        self.buf.write(node.name)
    def visitMul(self, node, *args):
        self.operator("*", node, *args)
    def visitAnd(self, node, *args):
        self.booleanop("and", node, *args)
    def visitOr(self, node, *args):
        self.booleanop("or", node, *args)
    def visitBitand(self, node, *args):
        self.booleanop("&", node, *args)
    def visitBitor(self, node, *args):
        self.booleanop("|", node, *args)
    def visitBitxor(self, node, *args):
        self.booleanop("^", node, *args)
    def visitAdd(self, node, *args):
        self.operator("+", node, *args)
    def visitGetattr(self, node, *args):
        self.visit(node.expr, *args)
        self.buf.write(".%s" % node.attrname)
    def visitSub(self, node, *args):
        self.operator("-", node, *args)
    def visitNot(self, node, *args):
        self.buf.write("not ")
        self.visit(node.expr)
    def visitDiv(self, node, *args):
        self.operator("/", node, *args)
    def visitFloorDiv(self, node, *args):
        self.operator("//", node, *args)
    def visitSubscript(self, node, *args):
        self.visit(node.expr)
        self.buf.write("[")
        [self.visit(x) for x in node.subs]
        self.buf.write("]")
    def visitUnarySub(self, node, *args):
        self.buf.write("-")
        self.visit(node.expr)
    def visitUnaryAdd(self, node, *args):
        # BUGFIX: this previously wrote "-", silently negating the operand;
        # unary plus must render as "+".
        self.buf.write("+")
        self.visit(node.expr)
    def visitSlice(self, node, *args):
        # renders "expr[lower:upper]"; extended/step slices are not handled
        self.visit(node.expr)
        self.buf.write("[")
        if node.lower is not None:
            self.visit(node.lower)
        self.buf.write(":")
        if node.upper is not None:
            self.visit(node.upper)
        self.buf.write("]")
    def visitDict(self, node):
        # children alternate key, value, key, value, ...
        self.buf.write("{")
        c = node.getChildren()
        for i in range(0, len(c), 2):
            self.visit(c[i])
            self.buf.write(": ")
            self.visit(c[i+1])
            if i<len(c) -2:
                self.buf.write(", ")
        self.buf.write("}")
    def visitTuple(self, node):
        # NOTE(review): a one-element tuple renders as "(x)" without the
        # trailing comma -- confirm callers never feed single-element tuples
        self.buf.write("(")
        c = node.getChildren()
        for i in range(0, len(c)):
            self.visit(c[i])
            if i<len(c) - 1:
                self.buf.write(", ")
        self.buf.write(")")
    def visitList(self, node):
        self.buf.write("[")
        c = node.getChildren()
        for i in range(0, len(c)):
            self.visit(c[i])
            if i<len(c) - 1:
                self.buf.write(", ")
        self.buf.write("]")
    def visitListComp(self, node):
        self.buf.write("[")
        self.visit(node.expr)
        self.buf.write(" ")
        for n in node.quals:
            self.visit(n)
        self.buf.write("]")
    def visitListCompFor(self, node):
        self.buf.write(" for ")
        self.visit(node.assign)
        self.buf.write(" in ")
        self.visit(node.list)
        for n in node.ifs:
            self.visit(n)
    def visitListCompIf(self, node):
        self.buf.write(" if ")
        self.visit(node.test)
    def visitCompare(self, node):
        self.visit(node.expr)
        for tup in node.ops:
            self.buf.write(tup[0])
            self.visit(tup[1])
    def visitCallFunc(self, node, *args):
        # arguments are rendered comma-separated, in source order
        self.visit(node.node)
        self.buf.write("(")
        if len(node.args):
            self.visit(node.args[0])
        for a in node.args[1:]:
            self.buf.write(", ")
            self.visit(a)
        self.buf.write(")")
| {
"repo_name": "gdubost1/shaderman",
"path": "mako/ast.py",
"copies": "5",
"size": "13990",
"license": "bsd-3-clause",
"hash": 2980278161335682600,
"line_mean": 39.7900874636,
"line_max": 248,
"alpha_frac": 0.5486061472,
"autogenerated": false,
"ratio": 4.02821767923985,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.011211696861487232,
"num_lines": 343
} |
"""Handles parsing of Python code.
Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
module is used.
"""
from StringIO import StringIO
from mako import exceptions, util
# words that cannot be assigned to (notably smaller than the total keys in __builtins__)
reserved = util.Set(['True', 'False', 'None'])
try:
import _ast
util.restore__ast(_ast)
import _ast_util
except ImportError:
_ast = None
from compiler import parse as compiler_parse
from compiler import visitor
def parse(code, mode='exec', **exception_kwargs):
    """Parse an expression into AST"""
    # Prefer the native _ast module (Python >= 2.5); fall back to the
    # deprecated ``compiler`` package otherwise.  Any parse failure is
    # re-raised as a mako SyntaxException carrying template position info.
    try:
        if _ast:
            return _ast_util.parse(code, '<unknown>', mode)
        else:
            return compiler_parse(code, mode)
    except Exception, e:
        # Python 2 except syntax; only the first 50 chars of the source are
        # echoed back in the error message
        raise exceptions.SyntaxException("(%s) %s (%s)" % (e.__class__.__name__, str(e), repr(code[0:50])), **exception_kwargs)
if _ast:
    class FindIdentifiers(_ast_util.NodeVisitor):
        """_ast visitor that records declared and undeclared identifiers on
        the given listener (which exposes ``declared_identifiers`` and
        ``undeclared_identifiers`` sets)."""
        def __init__(self, listener, **exception_kwargs):
            self.in_function = False          # True while inside a function body
            self.in_assign_targets = False    # True while visiting assignment targets
            self.local_ident_stack = {}       # argument names currently in scope
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def _add_declared(self, name):
            # only module-level names count as "declared"
            if not self.in_function:
                self.listener.declared_identifiers.add(name)
        def visit_ClassDef(self, node):
            self._add_declared(node.name)
        def visit_Assign(self, node):
            # flip around the visiting of Assign so the expression gets evaluated first,
            # in the case of a clause like "x=x+5" (x is undeclared)
            self.visit(node.value)
            in_a = self.in_assign_targets
            self.in_assign_targets = True
            for n in node.targets:
                self.visit(n)
            self.in_assign_targets = in_a
        def visit_FunctionDef(self, node):
            self._add_declared(node.name)
            # push function state onto stack. dont log any
            # more identifiers as "declared" until outside of the function,
            # but keep logging identifiers as "undeclared".
            # track argument names in each function header so they arent counted as "undeclared"
            saved = {}
            inf = self.in_function
            self.in_function = True
            for arg in node.args.args:
                if arg.id in self.local_ident_stack:
                    saved[arg.id] = True
                else:
                    self.local_ident_stack[arg.id] = True
            for n in node.body:
                self.visit(n)
            self.in_function = inf
            # pop this function's argument names back off the local stack
            for arg in node.args.args:
                if arg.id not in saved:
                    del self.local_ident_stack[arg.id]
        def visit_For(self, node):
            # flip around visit
            self.visit(node.iter)
            self.visit(node.target)
            for statement in node.body:
                self.visit(statement)
            for statement in node.orelse:
                self.visit(statement)
        def visit_Name(self, node):
            if isinstance(node.ctx, _ast.Store):
                self._add_declared(node.id)
            if node.id not in reserved and node.id not in self.listener.declared_identifiers and node.id not in self.local_ident_stack:
                self.listener.undeclared_identifiers.add(node.id)
        def visit_Import(self, node):
            for name in node.names:
                if name.asname is not None:
                    self._add_declared(name.asname)
                else:
                    # "import a.b" declares only the top-level name "a"
                    self._add_declared(name.name.split('.')[0])
        def visit_ImportFrom(self, node):
            for name in node.names:
                if name.asname is not None:
                    self._add_declared(name.asname)
                else:
                    if name.name == '*':
                        raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
                    self._add_declared(name.name)
    class FindTuple(_ast_util.NodeVisitor):
        """_ast visitor: expands a Tuple node into per-element code objects
        (via code_factory) and merges their identifier sets into the listener."""
        def __init__(self, listener, code_factory, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
            self.code_factory = code_factory
        def visit_Tuple(self, node):
            for n in node.elts:
                p = self.code_factory(n, **self.exception_kwargs)
                self.listener.codeargs.append(p)
                self.listener.args.append(ExpressionGenerator(n).value())
                self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
                self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
    class ParseFunc(_ast_util.NodeVisitor):
        """_ast visitor: copies a FunctionDef's signature (name, argument
        names, defaults, *args/**kwargs) onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def visit_FunctionDef(self, node):
            self.listener.funcname = node.name
            argnames = [arg.id for arg in node.args.args]
            if node.args.vararg:
                argnames.append(node.args.vararg)
            if node.args.kwarg:
                # **kwargs name goes last, matching FunctionDecl's layout
                argnames.append(node.args.kwarg)
            self.listener.argnames = argnames
            self.listener.defaults = node.args.defaults # ast
            self.listener.varargs = node.args.vararg
            self.listener.kwargs = node.args.kwarg
    class ExpressionGenerator(object):
        """Renders an _ast node back to literal Python source via
        _ast_util.SourceGenerator."""
        def __init__(self, astnode):
            self.generator = _ast_util.SourceGenerator(' ' * 4)
            self.generator.visit(astnode)
        def value(self):
            # the generator accumulates source as a list of string fragments
            return ''.join(self.generator.result)
else:
    class FindIdentifiers(object):
        """compiler-module visitor (pre-2.5 fallback) collecting declared and
        undeclared identifiers onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.in_function = False     # True while inside a function body
            self.local_ident_stack = {}  # argument names currently in scope
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def _add_declared(self, name):
            # only module-level names count as "declared"
            if not self.in_function:
                self.listener.declared_identifiers.add(name)
        def visitClass(self, node, *args):
            self._add_declared(node.name)
        def visitAssName(self, node, *args):
            self._add_declared(node.name)
        def visitAssign(self, node, *args):
            # flip around the visiting of Assign so the expression gets evaluated first,
            # in the case of a clause like "x=x+5" (x is undeclared)
            self.visit(node.expr, *args)
            for n in node.nodes:
                self.visit(n, *args)
        def visitFunction(self,node, *args):
            self._add_declared(node.name)
            # push function state onto stack. dont log any
            # more identifiers as "declared" until outside of the function,
            # but keep logging identifiers as "undeclared".
            # track argument names in each function header so they arent counted as "undeclared"
            saved = {}
            inf = self.in_function
            self.in_function = True
            for arg in node.argnames:
                if arg in self.local_ident_stack:
                    saved[arg] = True
                else:
                    self.local_ident_stack[arg] = True
            for n in node.getChildNodes():
                self.visit(n, *args)
            self.in_function = inf
            # pop this function's argument names back off the local stack
            for arg in node.argnames:
                if arg not in saved:
                    del self.local_ident_stack[arg]
        def visitFor(self, node, *args):
            # flip around visit
            self.visit(node.list, *args)
            self.visit(node.assign, *args)
            self.visit(node.body, *args)
        def visitName(self, node, *args):
            if node.name not in reserved and node.name not in self.listener.declared_identifiers and node.name not in self.local_ident_stack:
                self.listener.undeclared_identifiers.add(node.name)
        def visitImport(self, node, *args):
            for (mod, alias) in node.names:
                if alias is not None:
                    self._add_declared(alias)
                else:
                    # "import a.b" declares only the top-level name "a"
                    self._add_declared(mod.split('.')[0])
        def visitFrom(self, node, *args):
            for (mod, alias) in node.names:
                if alias is not None:
                    self._add_declared(alias)
                else:
                    if mod == '*':
                        raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
                    self._add_declared(mod)
        def visit(self, expr):
            visitor.walk(expr, self) #, walker=walker())
    class FindTuple(object):
        """compiler-module visitor: expands a Tuple node into per-element code
        objects (via code_factory) and merges their identifier sets into the
        listener."""
        def __init__(self, listener, code_factory, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
            self.code_factory = code_factory
        def visitTuple(self, node, *args):
            for n in node.nodes:
                p = self.code_factory(n, **self.exception_kwargs)
                self.listener.codeargs.append(p)
                self.listener.args.append(ExpressionGenerator(n).value())
                self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
                self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
        def visit(self, expr):
            visitor.walk(expr, self) #, walker=walker())
    class ParseFunc(object):
        """compiler-module visitor: copies a Function node's signature
        (name, argnames, defaults, varargs, kwargs) onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def visitFunction(self, node, *args):
            self.listener.funcname = node.name
            self.listener.argnames = node.argnames
            self.listener.defaults = node.defaults
            self.listener.varargs = node.varargs
            self.listener.kwargs = node.kwargs
        def visit(self, expr):
            visitor.walk(expr, self)
class ExpressionGenerator(object):
"""given an AST node, generates an equivalent literal Python expression."""
def __init__(self, astnode):
self.buf = StringIO()
visitor.walk(astnode, self) #, walker=walker())
def value(self):
return self.buf.getvalue()
def operator(self, op, node, *args):
self.buf.write("(")
self.visit(node.left, *args)
self.buf.write(" %s " % op)
self.visit(node.right, *args)
self.buf.write(")")
def booleanop(self, op, node, *args):
self.visit(node.nodes[0])
for n in node.nodes[1:]:
self.buf.write(" " + op + " ")
self.visit(n, *args)
def visitConst(self, node, *args):
self.buf.write(repr(node.value))
def visitAssName(self, node, *args):
# TODO: figure out OP_ASSIGN, other OP_s
self.buf.write(node.name)
def visitName(self, node, *args):
self.buf.write(node.name)
def visitMul(self, node, *args):
self.operator("*", node, *args)
def visitAnd(self, node, *args):
self.booleanop("and", node, *args)
def visitOr(self, node, *args):
self.booleanop("or", node, *args)
def visitBitand(self, node, *args):
self.booleanop("&", node, *args)
def visitBitor(self, node, *args):
self.booleanop("|", node, *args)
def visitBitxor(self, node, *args):
self.booleanop("^", node, *args)
def visitAdd(self, node, *args):
self.operator("+", node, *args)
def visitGetattr(self, node, *args):
self.visit(node.expr, *args)
self.buf.write(".%s" % node.attrname)
def visitSub(self, node, *args):
self.operator("-", node, *args)
def visitNot(self, node, *args):
self.buf.write("not ")
self.visit(node.expr)
def visitDiv(self, node, *args):
self.operator("/", node, *args)
def visitFloorDiv(self, node, *args):
self.operator("//", node, *args)
def visitSubscript(self, node, *args):
self.visit(node.expr)
self.buf.write("[")
[self.visit(x) for x in node.subs]
self.buf.write("]")
def visitUnarySub(self, node, *args):
self.buf.write("-")
self.visit(node.expr)
def visitUnaryAdd(self, node, *args):
self.buf.write("-")
self.visit(node.expr)
def visitSlice(self, node, *args):
self.visit(node.expr)
self.buf.write("[")
if node.lower is not None:
self.visit(node.lower)
self.buf.write(":")
if node.upper is not None:
self.visit(node.upper)
self.buf.write("]")
def visitDict(self, node):
self.buf.write("{")
c = node.getChildren()
for i in range(0, len(c), 2):
self.visit(c[i])
self.buf.write(": ")
self.visit(c[i+1])
if i<len(c) -2:
self.buf.write(", ")
self.buf.write("}")
def visitTuple(self, node):
self.buf.write("(")
c = node.getChildren()
for i in range(0, len(c)):
self.visit(c[i])
if i<len(c) - 1:
self.buf.write(", ")
self.buf.write(")")
def visitList(self, node):
self.buf.write("[")
c = node.getChildren()
for i in range(0, len(c)):
self.visit(c[i])
if i<len(c) - 1:
self.buf.write(", ")
self.buf.write("]")
def visitListComp(self, node):
self.buf.write("[")
self.visit(node.expr)
self.buf.write(" ")
for n in node.quals:
self.visit(n)
self.buf.write("]")
def visitListCompFor(self, node):
self.buf.write(" for ")
self.visit(node.assign)
self.buf.write(" in ")
self.visit(node.list)
for n in node.ifs:
self.visit(n)
def visitListCompIf(self, node):
self.buf.write(" if ")
self.visit(node.test)
def visitCompare(self, node):
self.visit(node.expr)
for tup in node.ops:
self.buf.write(tup[0])
self.visit(tup[1])
def visitCallFunc(self, node, *args):
self.visit(node.node)
self.buf.write("(")
if len(node.args):
self.visit(node.args[0])
for a in node.args[1:]:
self.buf.write(", ")
self.visit(a)
self.buf.write(")")
class walker(visitor.ASTVisitor):
    """Debugging visitor: prints each node before normal dispatch.
    Only used when uncommented in the visitor.walk calls above."""
    def dispatch(self, node, *args):
        print "Node:", str(node)
        #print "dir:", dir(node)
        return visitor.ASTVisitor.dispatch(self, node, *args)
| {
"repo_name": "collinprice/titanium_mobile",
"path": "support/common/mako/pyparser.py",
"copies": "42",
"size": "15757",
"license": "apache-2.0",
"hash": 3209651881480400000,
"line_mean": 41.4716981132,
"line_max": 249,
"alpha_frac": 0.5453449261,
"autogenerated": false,
"ratio": 4.09379059495973,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""Handles parsing of Python code.
Parsing to AST is done via _ast on Python > 2.5, otherwise the compiler
module is used.
"""
import sys
from StringIO import StringIO
from mako import exceptions, util
# words that cannot be assigned to (notably smaller than the total keys in __builtins__)
reserved = set(['True', 'False', 'None'])
new_ast = sys.version_info > (2, 5)
if new_ast:
import _ast
util.restore__ast(_ast)
import _ast_util
else:
from compiler import parse as compiler_parse
from compiler import visitor
def parse(code, mode='exec', **exception_kwargs):
    """Parse an expression into AST"""
    # Prefer the native _ast module (Python > 2.5); fall back to the
    # deprecated ``compiler`` package otherwise.  Any parse failure is
    # re-raised as a mako SyntaxException carrying template position info.
    try:
        if new_ast:
            return _ast_util.parse(code, '<unknown>', mode)
        else:
            return compiler_parse(code, mode)
    except Exception, e:
        # Python 2 except syntax; only the first 50 chars of the source are
        # echoed back in the error message
        raise exceptions.SyntaxException("(%s) %s (%s)" % (e.__class__.__name__, str(e), repr(code[0:50])), **exception_kwargs)
if new_ast:
    class FindIdentifiers(_ast_util.NodeVisitor):
        """_ast visitor that records declared and undeclared identifiers on
        the given listener (which exposes ``declared_identifiers`` and
        ``undeclared_identifiers`` sets)."""
        def __init__(self, listener, **exception_kwargs):
            self.in_function = False          # True while inside a function body
            self.in_assign_targets = False    # True while visiting assignment targets
            self.local_ident_stack = {}       # argument names currently in scope
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def _add_declared(self, name):
            # only module-level names count as "declared"
            if not self.in_function:
                self.listener.declared_identifiers.add(name)
        def visit_ClassDef(self, node):
            self._add_declared(node.name)
        def visit_Assign(self, node):
            # flip around the visiting of Assign so the expression gets evaluated first,
            # in the case of a clause like "x=x+5" (x is undeclared)
            self.visit(node.value)
            in_a = self.in_assign_targets
            self.in_assign_targets = True
            for n in node.targets:
                self.visit(n)
            self.in_assign_targets = in_a
        def visit_FunctionDef(self, node):
            self._add_declared(node.name)
            # push function state onto stack. dont log any
            # more identifiers as "declared" until outside of the function,
            # but keep logging identifiers as "undeclared".
            # track argument names in each function header so they arent counted as "undeclared"
            saved = {}
            inf = self.in_function
            self.in_function = True
            for arg in node.args.args:
                if arg.id in self.local_ident_stack:
                    saved[arg.id] = True
                else:
                    self.local_ident_stack[arg.id] = True
            for n in node.body:
                self.visit(n)
            self.in_function = inf
            # pop this function's argument names back off the local stack
            for arg in node.args.args:
                if arg.id not in saved:
                    del self.local_ident_stack[arg.id]
        def visit_For(self, node):
            # flip around visit
            self.visit(node.iter)
            self.visit(node.target)
            for statement in node.body:
                self.visit(statement)
            for statement in node.orelse:
                self.visit(statement)
        def visit_Name(self, node):
            if isinstance(node.ctx, _ast.Store):
                self._add_declared(node.id)
            if node.id not in reserved and node.id not in self.listener.declared_identifiers and node.id not in self.local_ident_stack:
                self.listener.undeclared_identifiers.add(node.id)
        def visit_Import(self, node):
            for name in node.names:
                if name.asname is not None:
                    self._add_declared(name.asname)
                else:
                    # "import a.b" declares only the top-level name "a"
                    self._add_declared(name.name.split('.')[0])
        def visit_ImportFrom(self, node):
            for name in node.names:
                if name.asname is not None:
                    self._add_declared(name.asname)
                else:
                    if name.name == '*':
                        raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
                    self._add_declared(name.name)
    class FindTuple(_ast_util.NodeVisitor):
        """_ast visitor: expands a Tuple node into per-element code objects
        (via code_factory) and merges their identifier sets into the listener."""
        def __init__(self, listener, code_factory, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
            self.code_factory = code_factory
        def visit_Tuple(self, node):
            for n in node.elts:
                p = self.code_factory(n, **self.exception_kwargs)
                self.listener.codeargs.append(p)
                self.listener.args.append(ExpressionGenerator(n).value())
                self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
                self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
    class ParseFunc(_ast_util.NodeVisitor):
        """_ast visitor: copies a FunctionDef's signature (name, argument
        names, defaults, *args/**kwargs) onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def visit_FunctionDef(self, node):
            self.listener.funcname = node.name
            argnames = [arg.id for arg in node.args.args]
            if node.args.vararg:
                argnames.append(node.args.vararg)
            if node.args.kwarg:
                # **kwargs name goes last, matching FunctionDecl's layout
                argnames.append(node.args.kwarg)
            self.listener.argnames = argnames
            self.listener.defaults = node.args.defaults # ast
            self.listener.varargs = node.args.vararg
            self.listener.kwargs = node.args.kwarg
    class ExpressionGenerator(object):
        """Renders an _ast node back to literal Python source via
        _ast_util.SourceGenerator."""
        def __init__(self, astnode):
            self.generator = _ast_util.SourceGenerator(' ' * 4)
            self.generator.visit(astnode)
        def value(self):
            # the generator accumulates source as a list of string fragments
            return ''.join(self.generator.result)
else:
    class FindIdentifiers(object):
        """compiler-module visitor (pre-2.5 fallback) collecting declared and
        undeclared identifiers onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.in_function = False     # True while inside a function body
            self.local_ident_stack = {}  # argument names currently in scope
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def _add_declared(self, name):
            # only module-level names count as "declared"
            if not self.in_function:
                self.listener.declared_identifiers.add(name)
        def visitClass(self, node, *args):
            self._add_declared(node.name)
        def visitAssName(self, node, *args):
            self._add_declared(node.name)
        def visitAssign(self, node, *args):
            # flip around the visiting of Assign so the expression gets evaluated first,
            # in the case of a clause like "x=x+5" (x is undeclared)
            self.visit(node.expr, *args)
            for n in node.nodes:
                self.visit(n, *args)
        def visitFunction(self,node, *args):
            self._add_declared(node.name)
            # push function state onto stack. dont log any
            # more identifiers as "declared" until outside of the function,
            # but keep logging identifiers as "undeclared".
            # track argument names in each function header so they arent counted as "undeclared"
            saved = {}
            inf = self.in_function
            self.in_function = True
            for arg in node.argnames:
                if arg in self.local_ident_stack:
                    saved[arg] = True
                else:
                    self.local_ident_stack[arg] = True
            for n in node.getChildNodes():
                self.visit(n, *args)
            self.in_function = inf
            # pop this function's argument names back off the local stack
            for arg in node.argnames:
                if arg not in saved:
                    del self.local_ident_stack[arg]
        def visitFor(self, node, *args):
            # flip around visit
            self.visit(node.list, *args)
            self.visit(node.assign, *args)
            self.visit(node.body, *args)
        def visitName(self, node, *args):
            if node.name not in reserved and node.name not in self.listener.declared_identifiers and node.name not in self.local_ident_stack:
                self.listener.undeclared_identifiers.add(node.name)
        def visitImport(self, node, *args):
            for (mod, alias) in node.names:
                if alias is not None:
                    self._add_declared(alias)
                else:
                    # "import a.b" declares only the top-level name "a"
                    self._add_declared(mod.split('.')[0])
        def visitFrom(self, node, *args):
            for (mod, alias) in node.names:
                if alias is not None:
                    self._add_declared(alias)
                else:
                    if mod == '*':
                        raise exceptions.CompileException("'import *' is not supported, since all identifier names must be explicitly declared. Please use the form 'from <modulename> import <name1>, <name2>, ...' instead.", **self.exception_kwargs)
                    self._add_declared(mod)
        def visit(self, expr):
            visitor.walk(expr, self) #, walker=walker())
    class FindTuple(object):
        """compiler-module visitor: expands a Tuple node into per-element code
        objects (via code_factory) and merges their identifier sets into the
        listener."""
        def __init__(self, listener, code_factory, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
            self.code_factory = code_factory
        def visitTuple(self, node, *args):
            for n in node.nodes:
                p = self.code_factory(n, **self.exception_kwargs)
                self.listener.codeargs.append(p)
                self.listener.args.append(ExpressionGenerator(n).value())
                self.listener.declared_identifiers = self.listener.declared_identifiers.union(p.declared_identifiers)
                self.listener.undeclared_identifiers = self.listener.undeclared_identifiers.union(p.undeclared_identifiers)
        def visit(self, expr):
            visitor.walk(expr, self) #, walker=walker())
    class ParseFunc(object):
        """compiler-module visitor: copies a Function node's signature
        (name, argnames, defaults, varargs, kwargs) onto the listener."""
        def __init__(self, listener, **exception_kwargs):
            self.listener = listener
            self.exception_kwargs = exception_kwargs
        def visitFunction(self, node, *args):
            self.listener.funcname = node.name
            self.listener.argnames = node.argnames
            self.listener.defaults = node.defaults
            self.listener.varargs = node.varargs
            self.listener.kwargs = node.kwargs
        def visit(self, expr):
            visitor.walk(expr, self)
class ExpressionGenerator(object):
"""given an AST node, generates an equivalent literal Python expression."""
def __init__(self, astnode):
self.buf = StringIO()
visitor.walk(astnode, self) #, walker=walker())
def value(self):
return self.buf.getvalue()
def operator(self, op, node, *args):
self.buf.write("(")
self.visit(node.left, *args)
self.buf.write(" %s " % op)
self.visit(node.right, *args)
self.buf.write(")")
def booleanop(self, op, node, *args):
self.visit(node.nodes[0])
for n in node.nodes[1:]:
self.buf.write(" " + op + " ")
self.visit(n, *args)
def visitConst(self, node, *args):
self.buf.write(repr(node.value))
def visitAssName(self, node, *args):
# TODO: figure out OP_ASSIGN, other OP_s
self.buf.write(node.name)
def visitName(self, node, *args):
self.buf.write(node.name)
def visitMul(self, node, *args):
self.operator("*", node, *args)
def visitAnd(self, node, *args):
self.booleanop("and", node, *args)
def visitOr(self, node, *args):
self.booleanop("or", node, *args)
def visitBitand(self, node, *args):
self.booleanop("&", node, *args)
def visitBitor(self, node, *args):
self.booleanop("|", node, *args)
def visitBitxor(self, node, *args):
self.booleanop("^", node, *args)
def visitAdd(self, node, *args):
self.operator("+", node, *args)
def visitGetattr(self, node, *args):
self.visit(node.expr, *args)
self.buf.write(".%s" % node.attrname)
def visitSub(self, node, *args):
self.operator("-", node, *args)
def visitNot(self, node, *args):
self.buf.write("not ")
self.visit(node.expr)
def visitDiv(self, node, *args):
self.operator("/", node, *args)
def visitFloorDiv(self, node, *args):
self.operator("//", node, *args)
def visitSubscript(self, node, *args):
self.visit(node.expr)
self.buf.write("[")
[self.visit(x) for x in node.subs]
self.buf.write("]")
def visitUnarySub(self, node, *args):
self.buf.write("-")
self.visit(node.expr)
def visitUnaryAdd(self, node, *args):
self.buf.write("-")
self.visit(node.expr)
def visitSlice(self, node, *args):
self.visit(node.expr)
self.buf.write("[")
if node.lower is not None:
self.visit(node.lower)
self.buf.write(":")
if node.upper is not None:
self.visit(node.upper)
self.buf.write("]")
def visitDict(self, node):
self.buf.write("{")
c = node.getChildren()
for i in range(0, len(c), 2):
self.visit(c[i])
self.buf.write(": ")
self.visit(c[i+1])
if i<len(c) -2:
self.buf.write(", ")
self.buf.write("}")
def visitTuple(self, node):
self.buf.write("(")
c = node.getChildren()
for i in range(0, len(c)):
self.visit(c[i])
if i<len(c) - 1:
self.buf.write(", ")
self.buf.write(")")
def visitList(self, node):
self.buf.write("[")
c = node.getChildren()
for i in range(0, len(c)):
self.visit(c[i])
if i<len(c) - 1:
self.buf.write(", ")
self.buf.write("]")
def visitListComp(self, node):
self.buf.write("[")
self.visit(node.expr)
self.buf.write(" ")
for n in node.quals:
self.visit(n)
self.buf.write("]")
def visitListCompFor(self, node):
self.buf.write(" for ")
self.visit(node.assign)
self.buf.write(" in ")
self.visit(node.list)
for n in node.ifs:
self.visit(n)
def visitListCompIf(self, node):
self.buf.write(" if ")
self.visit(node.test)
def visitCompare(self, node):
self.visit(node.expr)
for tup in node.ops:
self.buf.write(tup[0])
self.visit(tup[1])
def visitCallFunc(self, node, *args):
self.visit(node.node)
self.buf.write("(")
if len(node.args):
self.visit(node.args[0])
for a in node.args[1:]:
self.buf.write(", ")
self.visit(a)
self.buf.write(")")
class walker(visitor.ASTVisitor):
    """Debugging visitor: prints each node before normal dispatch.
    Only used when uncommented in the visitor.walk calls above."""
    def dispatch(self, node, *args):
        print "Node:", str(node)
        #print "dir:", dir(node)
        return visitor.ASTVisitor.dispatch(self, node, *args)
| {
"repo_name": "gabriel/shrub",
"path": "lib/mako/pyparser.py",
"copies": "6",
"size": "15783",
"license": "mit",
"hash": -5380666832497631000,
"line_mean": 41.3136729223,
"line_max": 249,
"alpha_frac": 0.545587024,
"autogenerated": false,
"ratio": 4.082514226590791,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.7628101250590791,
"avg_score": null,
"num_lines": null
} |
# ---------------- AStrading - Update --------------
# --------------- Created by Aron Svedin -----------
# -------------------- 2016-01-25 ------------------
'''
Update functions for Classes in Models
- Update SCTR - updatingSCTR()
in: array (Adj Close)
out: float (Average SCTR over SCTR_AVERAGE days, EMA50)
- Update Money wave - updatingMoneyWave()
in: array (High, Low, Adj Close, nextMWPrice = False, MW)
out: float (Money Wave)
- Sub func
Update next stock price for a fixed MW - if nextMWPrice = True
out: float (Price)
- Update weekly EMA Long Term (50) vs Short Term (10) - updatingEMALTvsST()
in: array (Adj Close)
out: Boolean (or array for plot)
- Update CoppockCurve - updatingCoppock()
Not yet implemented!
in: ?
out: Boolean (or array for plot)
- Update plot - updatingPlot()
Not yet implemented!
in:
out:
'''
import pandas as pd
import numpy as np
import talib as tb
from config import SCTR_AVERAGE
def updatingSCTR(adjClose):
    """Return the latest StockCharts Technical Rank (SCTR) value from an
    array of adjusted closes, or None when fewer than 251 points are given.

    Component weights: EMA200 30%, ROC125 30%, EMA50 15%, ROC20 15%,
    PPO-histogram slope 5%, RSI14 5%.
    """
    if len(adjClose) > 250:
        # -- Long term SCTR --------------------
        ema200 = tb.EMA(adjClose, timeperiod=200)
        # NOTE(review): (close/ema - 1) is a *fraction* while tb.ROC returns
        # a *percentage* -- the components mixed below are on different
        # scales; confirm whether a *100 is intended here.
        sctrEMA200 = ((adjClose/ema200)-1)
        sctrROC125 = tb.ROC(adjClose, timeperiod=125)
        longTerm = ((sctrEMA200*0.3) + (sctrROC125*0.3))
        # -- Medium term SCTR ------------------
        ema50 = tb.EMA(adjClose, timeperiod=50)
        sctrEMA50 = ((adjClose/ema50)-1)
        sctrROC20 = tb.ROC(adjClose, timeperiod=20)
        mediumTerm = ((sctrEMA50*0.15) + (sctrROC20*0.15))
        # -- Short term SCTR -------------------
        ppo = tb.PPO(adjClose, fastperiod=12, slowperiod=26, matype=1)
        ppoEMA = tb.EMA(ppo, timeperiod=9)
        ppoHist = ppo - ppoEMA
        # 3-day slope of the PPO histogram, clamped to [-1, 1] then mapped
        # onto a 0-100 scale below
        ppoHistSlope = (ppoHist - np.roll(ppoHist,3))/3
        ppoHistSlope[ppoHistSlope > 1] = 1
        ppoHistSlope[ppoHistSlope < -1] = -1
        rsi14 = tb.RSI(adjClose, timeperiod=14)
        shortTerm = (((ppoHistSlope+1)*50)*0.05) + (rsi14*0.05)
        sctr = (longTerm + mediumTerm + shortTerm)
        # only the most recent value is returned (averaging over
        # SCTR_AVERAGE days is commented out)
        return sctr[-1] #*SCTR_AVERAGE):].mean()
    # Throw exception?
    return None
def updatingMoneyWave(highp, lowp, closep, nextMWPrice = False):
    """Compute the latest 3-period slow stochastic ("Money Wave") from
    high/low/close arrays.

    Returns the slowk scalar by default; when ``nextMWPrice`` is True,
    returns instead the price at which the indicator would cross back
    through the 80/20 threshold (0 if slowk is between the thresholds).
    Returns (None, None) when fewer than 11 bars are supplied.

    NOTE(review): pd.rolling_min/rolling_max/rolling_mean were removed in
    modern pandas (use Series.rolling(...)); this code targets an old
    pandas release.
    """
    if len(closep) > 10:
        # slowk, slowd = tb.STOCH(highp, lowp, closep, fastk_period=5, slowk_period=3, slowk_matype=0, slowd_period=1, slowd_matype=0)
        # raw %K over a 5-bar window
        lowest, highest = pd.rolling_min(lowp, 5), pd.rolling_max(highp, 5)
        stoch = 100 * (closep - lowest) / (highest - lowest)
        # if nextMWPrice:
        MWhigh = 80
        MWlow = 20
        # slowk = 3-bar average of raw %K (latest value only)
        slowk = pd.rolling_mean(stoch, 3)[-1]
        if slowk > MWhigh:
            # solve for the close that would bring the 3-bar average
            # back down to the 80 threshold
            newPrice = ((highest[-1]-lowest[-1])*(((MWhigh*3)-stoch[-1]-stoch[-2])/100)+lowest[-1])
            print 'Buy below '
            print newPrice
            if nextMWPrice:
                return newPrice
        elif slowk < MWlow:
            # symmetric computation against the 20 threshold
            newPrice = ((highest[-1]-lowest[-1])*(((MWlow*3)-stoch[-1]-stoch[-2])/100)+lowest[-1])
            print 'Buy above '
            print newPrice
            if nextMWPrice:
                return newPrice
        if nextMWPrice:
            return 0
        # preStoch = ((MW*3) - slowd[-1] - slowd[-2])/100
        # newPrice = ((max(highp[-4:]) - min(lowp[-4:]))*preStoch)+min(lowp[-4:])
        return slowk
    # Throw exception?
    return (None, None)
def updatingEMA50(adjClose):
    """Return True if the latest adjusted close is above its 50-day EMA.

    Falls through (returns None implicitly) when fewer than 61 points
    are supplied.
    """
    if len(adjClose) > 60:
        ema50 = tb.EMA(adjClose, timeperiod=50)
        return adjClose[-1] > ema50[-1]
def updatingEMALTvsST(daily):
    """Return True when the weekly 10-period EMA is above the 50-period EMA,
    or None with fewer than 301 daily points.

    ``daily`` is indexed by date and carries an 'Adj Close' column
    (presumably a pandas DataFrame -- inferred from the asfreq usage).
    """
    if len(daily['Adj Close']) > 300:
        # resample to weekly bars ending Friday, forward-filling the last
        # known daily value
        weekly = daily.asfreq('W-FRI', method='pad', how='end')
        shortTerm = tb.EMA(weekly['Adj Close'].values, timeperiod=10)
        longTerm = tb.EMA(weekly['Adj Close'].values, timeperiod=50)
        return shortTerm[-1] > longTerm[-1]
    # Throw exception
    return None
def updatingCoppock():
    # stub -- Coppock Curve calculation not yet implemented
    return True
def updatingPlot():
    # stub -- plot updating not yet implemented
    return True
| {
"repo_name": "ASpelling/mw-trading",
"path": "app/update.py",
"copies": "1",
"size": "3665",
"license": "apache-2.0",
"hash": -5847947047986967000,
"line_mean": 26.6328125,
"line_max": 127,
"alpha_frac": 0.6163710778,
"autogenerated": false,
"ratio": 2.6557971014492754,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.37721681792492756,
"avg_score": null,
"num_lines": null
} |
"""Astral geocoder is a database of locations stored within the package.
To get the :class:`~astral.LocationInfo` for a location use the
:func:`~astral.geocoder.lookup` function e.g. ::
from astral.geocoder import lookup, database
l = lookup("London", database())
All locations stored in the database can be accessed using the `all_locations` generator ::
from astral.geocoder import all_locations
for location in all_locations:
print(location)
"""
from functools import reduce
from typing import Dict, Generator, List, Optional, Tuple, Union
from astral import LocationInfo, dms_to_float
__all__ = ["lookup", "database", "add_locations", "all_locations"]
# region Location Info
# name,region,timezone,latitude,longitude,elevation
_LOCATION_INFO = """Abu Dhabi,UAE,Asia/Dubai,24°28'N,54°22'E
Abu Dhabi,United Arab Emirates,Asia/Dubai,24°28'N,54°22'E
Abuja,Nigeria,Africa/Lagos,09°05'N,07°32'E
Accra,Ghana,Africa/Accra,05°35'N,00°06'W
Addis Ababa,Ethiopia,Africa/Addis_Ababa,09°02'N,38°42'E
Adelaide,Australia,Australia/Adelaide,34°56'S,138°36'E
Al Jubail,Saudi Arabia,Asia/Riyadh,25°24'N,49°39'W
Algiers,Algeria,Africa/Algiers,36°42'N,03°08'E
Amman,Jordan,Asia/Amman,31°57'N,35°52'E
Amsterdam,Netherlands,Europe/Amsterdam,52°23'N,04°54'E
Andorra la Vella,Andorra,Europe/Andorra,42°31'N,01°32'E
Ankara,Turkey,Europe/Istanbul,39°57'N,32°54'E
Antananarivo,Madagascar,Indian/Antananarivo,18°55'S,47°31'E
Apia,Samoa,Pacific/Apia,13°50'S,171°50'W
Ashgabat,Turkmenistan,Asia/Ashgabat,38°00'N,57°50'E
Asmara,Eritrea,Africa/Asmara,15°19'N,38°55'E
Astana,Kazakhstan,Asia/Qyzylorda,51°10'N,71°30'E
Asuncion,Paraguay,America/Asuncion,25°10'S,57°30'W
Athens,Greece,Europe/Athens,37°58'N,23°46'E
Avarua,Cook Islands,Etc/GMT-10,21°12'N,159°46'W
Baghdad,Iraq,Asia/Baghdad,33°20'N,44°30'E
Baku,Azerbaijan,Asia/Baku,40°29'N,49°56'E
Bamako,Mali,Africa/Bamako,12°34'N,07°55'W
Bandar Seri Begawan,Brunei Darussalam,Asia/Brunei,04°52'N,115°00'E
Bangkok,Thailand,Asia/Bangkok,13°45'N,100°35'E
Bangui,Central African Republic,Africa/Bangui,04°23'N,18°35'E
Banjul,Gambia,Africa/Banjul,13°28'N,16°40'W
Basse-Terre,Guadeloupe,America/Guadeloupe,16°00'N,61°44'W
Basseterre,Saint Kitts and Nevis,America/St_Kitts,17°17'N,62°43'W
Beijing,China,Asia/Harbin,39°55'N,116°20'E
Beirut,Lebanon,Asia/Beirut,33°53'N,35°31'E
Belfast,Northern Ireland,Europe/Belfast,54°36'N,5°56'W
Belgrade,Yugoslavia,Europe/Belgrade,44°50'N,20°37'E
Belmopan,Belize,America/Belize,17°18'N,88°30'W
Berlin,Germany,Europe/Berlin,52°30'N,13°25'E
Bern,Switzerland,Europe/Zurich,46°57'N,07°28'E
Bishkek,Kyrgyzstan,Asia/Bishkek,42°54'N,74°46'E
Bissau,Guinea-Bissau,Africa/Bissau,11°45'N,15°45'W
Bloemfontein,South Africa,Africa/Johannesburg,29°12'S,26°07'E
Bogota,Colombia,America/Bogota,04°34'N,74°00'W
Brasilia,Brazil,Brazil/East,15°47'S,47°55'W
Bratislava,Slovakia,Europe/Bratislava,48°10'N,17°07'E
Brazzaville,Congo,Africa/Brazzaville,04°09'S,15°12'E
Bridgetown,Barbados,America/Barbados,13°05'N,59°30'W
Brisbane,Australia,Australia/Brisbane,27°30'S,153°01'E
Brussels,Belgium,Europe/Brussels,50°51'N,04°21'E
Bucharest,Romania,Europe/Bucharest,44°27'N,26°10'E
Bucuresti,Romania,Europe/Bucharest,44°27'N,26°10'E
Budapest,Hungary,Europe/Budapest,47°29'N,19°05'E
Buenos Aires,Argentina,America/Buenos_Aires,34°62'S,58°44'W
Bujumbura,Burundi,Africa/Bujumbura,03°16'S,29°18'E
Cairo,Egypt,Africa/Cairo,30°01'N,31°14'E
Canberra,Australia,Australia/Canberra,35°15'S,149°08'E
Cape Town,South Africa,Africa/Johannesburg,33°55'S,18°22'E
Caracas,Venezuela,America/Caracas,10°30'N,66°55'W
Castries,Saint Lucia,America/St_Lucia,14°02'N,60°58'W
Cayenne,French Guiana,America/Cayenne,05°05'N,52°18'W
Charlotte Amalie,United States of Virgin Islands,America/Virgin,18°21'N,64°56'W
Chisinau,Moldova,Europe/Chisinau,47°02'N,28°50'E
Conakry,Guinea,Africa/Conakry,09°29'N,13°49'W
Copenhagen,Denmark,Europe/Copenhagen,55°41'N,12°34'E
Cotonou,Benin,Africa/Porto-Novo,06°23'N,02°42'E
Dakar,Senegal,Africa/Dakar,14°34'N,17°29'W
Damascus,Syrian Arab Republic,Asia/Damascus,33°30'N,36°18'E
Dammam,Saudi Arabia,Asia/Riyadh,26°30'N,50°12'E
Darwin,Australia,Australia/Darwin,12°26'S,130°50'E
Dhaka,Bangladesh,Asia/Dhaka,23°43'N,90°26'E
Dili,East Timor,Asia/Dili,08°29'S,125°34'E
Djibouti,Djibouti,Africa/Djibouti,11°08'N,42°20'E
Dodoma,United Republic of Tanzania,Africa/Dar_es_Salaam,06°08'S,35°45'E
Doha,Qatar,Asia/Qatar,25°15'N,51°35'E
Douglas,Isle Of Man,Europe/London,54°9'N,4°29'W
Dublin,Ireland,Europe/Dublin,53°21'N,06°15'W
Dushanbe,Tajikistan,Asia/Dushanbe,38°33'N,68°48'E
El Aaiun,Morocco,UTC,27°9'N,13°12'W
Fort-de-France,Martinique,America/Martinique,14°36'N,61°02'W
Freetown,Sierra Leone,Africa/Freetown,08°30'N,13°17'W
Funafuti,Tuvalu,Pacific/Funafuti,08°31'S,179°13'E
Gaborone,Botswana,Africa/Gaborone,24°45'S,25°57'E
George Town,Cayman Islands,America/Cayman,19°20'N,81°24'W
Georgetown,Guyana,America/Guyana,06°50'N,58°12'W
Gibraltar,Gibraltar,Europe/Gibraltar,36°9'N,5°21'W
Guatemala,Guatemala,America/Guatemala,14°40'N,90°22'W
Hanoi,Viet Nam,Asia/Saigon,21°05'N,105°55'E
Harare,Zimbabwe,Africa/Harare,17°43'S,31°02'E
Havana,Cuba,America/Havana,23°08'N,82°22'W
Helsinki,Finland,Europe/Helsinki,60°15'N,25°03'E
Hobart,Tasmania,Australia/Hobart,42°53'S,147°19'E
Hong Kong,China,Asia/Hong_Kong,22°16'N,114°09'E
Honiara,Solomon Islands,Pacific/Guadalcanal,09°27'S,159°57'E
Islamabad,Pakistan,Asia/Karachi,33°40'N,73°10'E
Jakarta,Indonesia,Asia/Jakarta,06°09'S,106°49'E
Jerusalem,Israel,Asia/Jerusalem,31°47'N,35°12'E
Juba,South Sudan,Africa/Juba,4°51'N,31°36'E
Jubail,Saudi Arabia,Asia/Riyadh,27°02'N,49°39'E
Kabul,Afghanistan,Asia/Kabul,34°28'N,69°11'E
Kampala,Uganda,Africa/Kampala,00°20'N,32°30'E
Kathmandu,Nepal,Asia/Kathmandu,27°45'N,85°20'E
Khartoum,Sudan,Africa/Khartoum,15°31'N,32°35'E
Kiev,Ukraine,Europe/Kiev,50°30'N,30°28'E
Kigali,Rwanda,Africa/Kigali,01°59'S,30°04'E
Kingston,Jamaica,America/Jamaica,18°00'N,76°50'W
Kingston,Norfolk Island,Pacific/Norfolk,45°20'S,168°43'E
Kingstown,Saint Vincent and the Grenadines,America/St_Vincent,13°10'N,61°10'W
Kinshasa,Democratic Republic of the Congo,Africa/Kinshasa,04°20'S,15°15'E
Koror,Palau,Pacific/Palau,07°20'N,134°28'E
Kuala Lumpur,Malaysia,Asia/Kuala_Lumpur,03°09'N,101°41'E
Kuwait,Kuwait,Asia/Kuwait,29°30'N,48°00'E
La Paz,Bolivia,America/La_Paz,16°20'S,68°10'W
Libreville,Gabon,Africa/Libreville,00°25'N,09°26'E
Lilongwe,Malawi,Africa/Blantyre,14°00'S,33°48'E
Lima,Peru,America/Lima,12°00'S,77°00'W
Lisbon,Portugal,Europe/Lisbon,38°42'N,09°10'W
Ljubljana,Slovenia,Europe/Ljubljana,46°04'N,14°33'E
Lome,Togo,Africa/Lome,06°09'N,01°20'E
London,England,Europe/London,51°28'24"N,00°00'3"W
Luanda,Angola,Africa/Luanda,08°50'S,13°15'E
Lusaka,Zambia,Africa/Lusaka,15°28'S,28°16'E
Luxembourg,Luxembourg,Europe/Luxembourg,49°37'N,06°09'E
Macau,Macao,Asia/Macau,22°12'N,113°33'E
Madinah,Saudi Arabia,Asia/Riyadh,24°28'N,39°36'E
Madrid,Spain,Europe/Madrid,40°25'N,03°45'W
Majuro,Marshall Islands,Pacific/Majuro,7°4'N,171°16'E
Makkah,Saudi Arabia,Asia/Riyadh,21°26'N,39°49'E
Malabo,Equatorial Guinea,Africa/Malabo,03°45'N,08°50'E
Male,Maldives,Indian/Maldives,04°00'N,73°28'E
Mamoudzou,Mayotte,Indian/Mayotte,12°48'S,45°14'E
Managua,Nicaragua,America/Managua,12°06'N,86°20'W
Manama,Bahrain,Asia/Bahrain,26°10'N,50°30'E
Manila,Philippines,Asia/Manila,14°40'N,121°03'E
Maputo,Mozambique,Africa/Maputo,25°58'S,32°32'E
Maseru,Lesotho,Africa/Maseru,29°18'S,27°30'E
Masqat,Oman,Asia/Muscat,23°37'N,58°36'E
Mbabane,Swaziland,Africa/Mbabane,26°18'S,31°06'E
Mecca,Saudi Arabia,Asia/Riyadh,21°26'N,39°49'E
Medina,Saudi Arabia,Asia/Riyadh,24°28'N,39°36'E
Melbourne,Australia,Australia/Melbourne,37°48'S,144°57'E
Mexico,Mexico,America/Mexico_City,19°20'N,99°10'W
Minsk,Belarus,Europe/Minsk,53°52'N,27°30'E
Mogadishu,Somalia,Africa/Mogadishu,02°02'N,45°25'E
Monaco,Priciplality Of Monaco,Europe/Monaco,43°43'N,7°25'E
Monrovia,Liberia,Africa/Monrovia,06°18'N,10°47'W
Montevideo,Uruguay,America/Montevideo,34°50'S,56°11'W
Moroni,Comoros,Indian/Comoro,11°40'S,43°16'E
Moscow,Russian Federation,Europe/Moscow,55°45'N,37°35'E
Moskva,Russian Federation,Europe/Moscow,55°45'N,37°35'E
Mumbai,India,Asia/Kolkata,18°58'N,72°49'E
Muscat,Oman,Asia/Muscat,23°37'N,58°32'E
N'Djamena,Chad,Africa/Ndjamena,12°10'N,14°59'E
Nairobi,Kenya,Africa/Nairobi,01°17'S,36°48'E
Nassau,Bahamas,America/Nassau,25°05'N,77°20'W
Naypyidaw,Myanmar,Asia/Rangoon,19°45'N,96°6'E
New Delhi,India,Asia/Kolkata,28°37'N,77°13'E
Ngerulmud,Palau,Pacific/Palau,7°30'N,134°37'E
Niamey,Niger,Africa/Niamey,13°27'N,02°06'E
Nicosia,Cyprus,Asia/Nicosia,35°10'N,33°25'E
Nouakchott,Mauritania,Africa/Nouakchott,20°10'S,57°30'E
Noumea,New Caledonia,Pacific/Noumea,22°17'S,166°30'E
Nuku'alofa,Tonga,Pacific/Tongatapu,21°10'S,174°00'W
Nuuk,Greenland,America/Godthab,64°10'N,51°35'W
Oranjestad,Aruba,America/Aruba,12°32'N,70°02'W
Oslo,Norway,Europe/Oslo,59°55'N,10°45'E
Ottawa,Canada,US/Eastern,45°27'N,75°42'W
Ouagadougou,Burkina Faso,Africa/Ouagadougou,12°15'N,01°30'W
P'yongyang,Democratic People's Republic of Korea,Asia/Pyongyang,39°09'N,125°30'E
Pago Pago,American Samoa,Pacific/Pago_Pago,14°16'S,170°43'W
Palikir,Micronesia,Pacific/Ponape,06°55'N,158°09'E
Panama,Panama,America/Panama,09°00'N,79°25'W
Papeete,French Polynesia,Pacific/Tahiti,17°32'S,149°34'W
Paramaribo,Suriname,America/Paramaribo,05°50'N,55°10'W
Paris,France,Europe/Paris,48°50'N,02°20'E
Perth,Australia,Australia/Perth,31°56'S,115°50'E
Phnom Penh,Cambodia,Asia/Phnom_Penh,11°33'N,104°55'E
Podgorica,Montenegro,Europe/Podgorica,42°28'N,19°16'E
Port Louis,Mauritius,Indian/Mauritius,20°9'S,57°30'E
Port Moresby,Papua New Guinea,Pacific/Port_Moresby,09°24'S,147°08'E
Port-Vila,Vanuatu,Pacific/Efate,17°45'S,168°18'E
Port-au-Prince,Haiti,America/Port-au-Prince,18°40'N,72°20'W
Port of Spain,Trinidad and Tobago,America/Port_of_Spain,10°40'N,61°31'W
Porto-Novo,Benin,Africa/Porto-Novo,06°23'N,02°42'E
Prague,Czech Republic,Europe/Prague,50°05'N,14°22'E
Praia,Cape Verde,Atlantic/Cape_Verde,15°02'N,23°34'W
Pretoria,South Africa,Africa/Johannesburg,25°44'S,28°12'E
Pristina,Albania,Europe/Tirane,42°40'N,21°10'E
Quito,Ecuador,America/Guayaquil,00°15'S,78°35'W
Rabat,Morocco,Africa/Casablanca,34°1'N,6°50'W
Reykjavik,Iceland,Atlantic/Reykjavik,64°10'N,21°57'W
Riga,Latvia,Europe/Riga,56°53'N,24°08'E
Riyadh,Saudi Arabia,Asia/Riyadh,24°41'N,46°42'E
Road Town,British Virgin Islands,America/Virgin,18°27'N,64°37'W
Rome,Italy,Europe/Rome,41°54'N,12°29'E
Roseau,Dominica,America/Dominica,15°20'N,61°24'W
Saint Helier,Jersey,Etc/GMT,49°11'N,2°6'W
Saint Pierre,Saint Pierre and Miquelon,America/Miquelon,46°46'N,56°12'W
Saipan,Northern Mariana Islands,Pacific/Saipan,15°12'N,145°45'E
Sana,Yemen,Asia/Aden,15°20'N,44°12'W
Sana'a,Yemen,Asia/Aden,15°20'N,44°12'W
San Jose,Costa Rica,America/Costa_Rica,09°55'N,84°02'W
San Juan,Puerto Rico,America/Puerto_Rico,18°28'N,66°07'W
San Marino,San Marino,Europe/San_Marino,43°55'N,12°30'E
San Salvador,El Salvador,America/El_Salvador,13°40'N,89°10'W
Santiago,Chile,America/Santiago,33°24'S,70°40'W
Santo Domingo,Dominica Republic,America/Santo_Domingo,18°30'N,69°59'W
Sao Tome,Sao Tome and Principe,Africa/Sao_Tome,00°10'N,06°39'E
Sarajevo,Bosnia and Herzegovina,Europe/Sarajevo,43°52'N,18°26'E
Seoul,Republic of Korea,Asia/Seoul,37°31'N,126°58'E
Singapore,Republic of Singapore,Asia/Singapore,1°18'N,103°48'E
Skopje,The Former Yugoslav Republic of Macedonia,Europe/Skopje,42°01'N,21°26'E
Sofia,Bulgaria,Europe/Sofia,42°45'N,23°20'E
Sri Jayawardenapura Kotte,Sri Lanka,Asia/Colombo,6°54'N,79°53'E
St. George's,Grenada,America/Grenada,32°22'N,64°40'W
St. John's,Antigua and Barbuda,America/Antigua,17°7'N,61°51'W
St. Peter Port,Guernsey,Europe/Guernsey,49°26'N,02°33'W
Stanley,Falkland Islands,Atlantic/Stanley,51°40'S,59°51'W
Stockholm,Sweden,Europe/Stockholm,59°20'N,18°05'E
Sucre,Bolivia,America/La_Paz,16°20'S,68°10'W
Suva,Fiji,Pacific/Fiji,18°06'S,178°30'E
Sydney,Australia,Australia/Sydney,33°53'S,151°13'E
Taipei,Republic of China (Taiwan),Asia/Taipei,25°02'N,121°38'E
T'bilisi,Georgia,Asia/Tbilisi,41°43'N,44°50'E
Tbilisi,Georgia,Asia/Tbilisi,41°43'N,44°50'E
Tallinn,Estonia,Europe/Tallinn,59°22'N,24°48'E
Tarawa,Kiribati,Pacific/Tarawa,01°30'N,173°00'E
Tashkent,Uzbekistan,Asia/Tashkent,41°20'N,69°10'E
Tegucigalpa,Honduras,America/Tegucigalpa,14°05'N,87°14'W
Tehran,Iran,Asia/Tehran,35°44'N,51°30'E
Thimphu,Bhutan,Asia/Thimphu,27°31'N,89°45'E
Tirana,Albania,Europe/Tirane,41°18'N,19°49'E
Tirane,Albania,Europe/Tirane,41°18'N,19°49'E
Torshavn,Faroe Islands,Atlantic/Faroe,62°05'N,06°56'W
Tokyo,Japan,Asia/Tokyo,35°41'N,139°41'E
Tripoli,Libyan Arab Jamahiriya,Africa/Tripoli,32°49'N,13°07'E
Tunis,Tunisia,Africa/Tunis,36°50'N,10°11'E
Ulan Bator,Mongolia,Asia/Ulaanbaatar,47°55'N,106°55'E
Ulaanbaatar,Mongolia,Asia/Ulaanbaatar,47°55'N,106°55'E
Vaduz,Liechtenstein,Europe/Vaduz,47°08'N,09°31'E
Valletta,Malta,Europe/Malta,35°54'N,14°31'E
Vienna,Austria,Europe/Vienna,48°12'N,16°22'E
Vientiane,Lao People's Democratic Republic,Asia/Vientiane,17°58'N,102°36'E
Vilnius,Lithuania,Europe/Vilnius,54°38'N,25°19'E
W. Indies,Antigua and Barbuda,America/Antigua,17°20'N,61°48'W
Warsaw,Poland,Europe/Warsaw,52°13'N,21°00'E
Washington DC,USA,US/Eastern,39°91'N,77°02'W
Wellington,New Zealand,Pacific/Auckland,41°19'S,174°46'E
Willemstad,Netherlands Antilles,America/Curacao,12°05'N,69°00'W
Windhoek,Namibia,Africa/Windhoek,22°35'S,17°04'E
Yamoussoukro,Cote d'Ivoire,Africa/Abidjan,06°49'N,05°17'W
Yangon,Myanmar,Asia/Rangoon,16°45'N,96°20'E
Yaounde,Cameroon,Africa/Douala,03°50'N,11°35'E
Yaren,Nauru,Pacific/Nauru,0°32'S,166°55'E
Yerevan,Armenia,Asia/Yerevan,40°10'N,44°31'E
Zagreb,Croatia,Europe/Zagreb,45°50'N,15°58'E
# UK Cities
Aberdeen,Scotland,Europe/London,57°08'N,02°06'W
Birmingham,England,Europe/London,52°30'N,01°50'W
Bolton,England,Europe/London,53°35'N,02°15'W
Bradford,England,Europe/London,53°47'N,01°45'W
Bristol,England,Europe/London,51°28'N,02°35'W
Cardiff,Wales,Europe/London,51°29'N,03°13'W
Crawley,England,Europe/London,51°8'N,00°10'W
Edinburgh,Scotland,Europe/London,55°57'N,03°13'W
Glasgow,Scotland,Europe/London,55°50'N,04°15'W
Greenwich,England,Europe/London,51°28'N,00°00'W
Leeds,England,Europe/London,53°48'N,01°35'W
Leicester,England,Europe/London,52°38'N,01°08'W
Liverpool,England,Europe/London,53°25'N,03°00'W
Manchester,England,Europe/London,53°30'N,02°15'W
Newcastle Upon Tyne,England,Europe/London,54°59'N,01°36'W
Newcastle,England,Europe/London,54°59'N,01°36'W
Norwich,England,Europe/London,52°38'N,01°18'E
Oxford,England,Europe/London,51°45'N,01°15'W
Plymouth,England,Europe/London,50°25'N,04°15'W
Portsmouth,England,Europe/London,50°48'N,01°05'W
Reading,England,Europe/London,51°27'N,0°58'W
Sheffield,England,Europe/London,53°23'N,01°28'W
Southampton,England,Europe/London,50°55'N,01°25'W
Swansea,England,Europe/London,51°37'N,03°57'W
Swindon,England,Europe/London,51°34'N,01°47'W
Wolverhampton,England,Europe/London,52°35'N,2°08'W
Barrow-In-Furness,England,Europe/London,54°06'N,3°13'W
# US State Capitals
Montgomery,USA,US/Central,32°21'N,86°16'W
Juneau,USA,US/Alaska,58°23'N,134°11'W
Phoenix,USA,America/Phoenix,33°26'N,112°04'W
Little Rock,USA,US/Central,34°44'N,92°19'W
Sacramento,USA,US/Pacific,38°33'N,121°28'W
Denver,USA,US/Mountain,39°44'N,104°59'W
Hartford,USA,US/Eastern,41°45'N,72°41'W
Dover,USA,US/Eastern,39°09'N,75°31'W
Tallahassee,USA,US/Eastern,30°27'N,84°16'W
Atlanta,USA,US/Eastern,33°45'N,84°23'W
Honolulu,USA,US/Hawaii,21°18'N,157°49'W
Boise,USA,US/Mountain,43°36'N,116°12'W
Springfield,USA,US/Central,39°47'N,89°39'W
Indianapolis,USA,US/Eastern,39°46'N,86°9'W
Des Moines,USA,US/Central,41°35'N,93°37'W
Topeka,USA,US/Central,39°03'N,95°41'W
Frankfort,USA,US/Eastern,38°11'N,84°51'W
Baton Rouge,USA,US/Central,30°27'N,91°8'W
Augusta,USA,US/Eastern,44°18'N,69°46'W
Annapolis,USA,US/Eastern,38°58'N,76°30'W
Boston,USA,US/Eastern,42°21'N,71°03'W
Lansing,USA,US/Eastern,42°44'N,84°32'W
Saint Paul,USA,US/Central,44°56'N,93°05'W
Jackson,USA,US/Central,32°17'N,90°11'W
Jefferson City,USA,US/Central,38°34'N,92°10'W
Helena,USA,US/Mountain,46°35'N,112°1'W
Lincoln,USA,US/Central,40°48'N,96°40'W
Carson City,USA,US/Pacific,39°9'N,119°45'W
Concord,USA,US/Eastern,43°12'N,71°32'W
Trenton,USA,US/Eastern,40°13'N,74°45'W
Santa Fe,USA,US/Mountain,35°40'N,105°57'W
Albany,USA,US/Eastern,42°39'N,73°46'W
Raleigh,USA,US/Eastern,35°49'N,78°38'W
Bismarck,USA,US/Central,46°48'N,100°46'W
Columbus,USA,US/Eastern,39°59'N,82°59'W
Oklahoma City,USA,US/Central,35°28'N,97°32'W
Salem,USA,US/Pacific,44°55'N,123°1'W
Harrisburg,USA,US/Eastern,40°16'N,76°52'W
Providence,USA,US/Eastern,41°49'N,71°25'W
Columbia,USA,US/Eastern,34°00'N,81°02'W
Pierre,USA,US/Central,44°22'N,100°20'W
Nashville,USA,US/Central,36°10'N,86°47'W
Austin,USA,US/Central,30°16'N,97°45'W
Salt Lake City,USA,US/Mountain,40°45'N,111°53'W
Montpelier,USA,US/Eastern,44°15'N,72°34'W
Richmond,USA,US/Eastern,37°32'N,77°25'W
Olympia,USA,US/Pacific,47°2'N,122°53'W
Charleston,USA,US/Eastern,38°20'N,81°38'W
Madison,USA,US/Central,43°4'N,89°24'W
Cheyenne,USA,US/Mountain,41°8'N,104°48'W
# Major US Cities
Birmingham,USA,US/Central,33°39'N,86°48'W
Anchorage,USA,US/Alaska,61°13'N,149°53'W
Los Angeles,USA,US/Pacific,34°03'N,118°15'W
San Francisco,USA,US/Pacific,37°46'N,122°25'W
Bridgeport,USA,US/Eastern,41°11'N,73°11'W
Wilmington,USA,US/Eastern,39°44'N,75°32'W
Jacksonville,USA,US/Eastern,30°19'N,81°39'W
Miami,USA,US/Eastern,26°8'N,80°12'W
Chicago,USA,US/Central,41°50'N,87°41'W
Wichita,USA,US/Central,37°41'N,97°20'W
Louisville,USA,US/Eastern,38°15'N,85°45'W
New Orleans,USA,US/Central,29°57'N,90°4'W
Portland,USA,US/Eastern,43°39'N,70°16'W
Baltimore,USA,US/Eastern,39°17'N,76°37'W
Detroit,USA,US/Eastern,42°19'N,83°2'W
Minneapolis,USA,US/Central,44°58'N,93°15'W
Kansas City,USA,US/Central,39°06'N,94°35'W
Billings,USA,US/Mountain,45°47'N,108°32'W
Omaha,USA,US/Central,41°15'N,96°0'W
Las Vegas,USA,US/Pacific,36°10'N,115°08'W
Manchester,USA,US/Eastern,42°59'N,71°27'W
Newark,USA,US/Eastern,40°44'N,74°11'W
Albuquerque,USA,US/Mountain,35°06'N,106°36'W
New York,USA,US/Eastern,40°43'N,74°0'W
Charlotte,USA,US/Eastern,35°13'N,80°50'W
Fargo,USA,US/Central,46°52'N,96°47'W
Cleveland,USA,US/Eastern,41°28'N,81°40'W
Philadelphia,USA,US/Eastern,39°57'N,75°10'W
Sioux Falls,USA,US/Central,43°32'N,96°43'W
Memphis,USA,US/Central,35°07'N,89°58'W
Houston,USA,US/Central,29°45'N,95°22'W
Dallas,USA,US/Central,32°47'N,96°48'W
Burlington,USA,US/Eastern,44°28'N,73°9'W
Virginia Beach,USA,US/Eastern,36°50'N,76°05'W
Seattle,USA,US/Pacific,47°36'N,122°19'W
Milwaukee,USA,US/Central,43°03'N,87°57'W
San Diego,USA,US/Pacific,32°42'N,117°09'W
Orlando,USA,US/Eastern,28°32'N,81°22'W
Buffalo,USA,US/Eastern,42°54'N,78°50'W
Toledo,USA,US/Eastern,41°39'N,83°34'W
# Canadian cities
Vancouver,Canada,America/Vancouver,49°15'N,123°6'W
Calgary,Canada,America/Edmonton,51°2'N,114°3'W
Edmonton,Canada,America/Edmonton,53°32'N,113°29'W
Saskatoon,Canada,America/Regina,52°8'N,106°40'W
Regina,Canada,America/Regina,50°27'N,104°36'W
Winnipeg,Canada,America/Winnipeg,49°53'N,97°8'W
Toronto,Canada,America/Toronto,43°39'N,79°22'W
Montreal,Canada,America/Montreal,45°30'N,73°33'W
Quebec,Canada,America/Toronto,46°48'N,71°14'W
Fredericton,Canada,America/Halifax,45°57'N,66°38'W
Halifax,Canada,America/Halifax,44°38'N,63°34'W
Charlottetown,Canada,America/Halifax,46°14'N,63°7'W
St. John's,Canada,America/Halifax,47°33'N,52°42'W
Whitehorse,Canada,America/Whitehorse,60°43'N,135°3'W
Yellowknife,Canada,America/Yellowknife,62°27'N,114°22'W
Iqaluit,Canada,America/Iqaluit,63°44'N,68°31'W
"""
# endregion
# Type aliases describing the nested database structure:
#   {group_name: {location_name: [LocationInfo, ...]}}
# A location name maps to a *list* because the same name may exist in
# several regions (e.g. "Abu Dhabi" in both "UAE" and "United Arab Emirates").
GroupName = str
GroupInfo = Dict
LocationInfoList = List[LocationInfo]
LocationDatabase = Dict[GroupName, GroupInfo[str, LocationInfoList]]
def database() -> LocationDatabase:
    """Build and return a fresh location database seeded with the
    initial set of locations stored in this module.
    """
    initial_db: LocationDatabase = {}
    _add_locations_from_str(_LOCATION_INFO, initial_db)
    return initial_db
def _sanitize_key(key) -> str:
    """Normalise a location or group key for case-insensitive lookup:
    lowercase the string form and turn spaces into underscores.

    Args:
        key: The key to sanitize
    """
    return "_".join(str(key).lower().split(" "))
def _location_count(db: "LocationDatabase") -> int:
    """Return the number of locations currently in the database.

    Each group maps a location name to a *list* of LocationInfo entries
    (the same name can exist in multiple regions), so the list lengths
    must be summed.  The previous implementation added ``len(group)``,
    which counted distinct location *names* and under-counted names that
    appear in more than one region.
    """
    return sum(
        len(locations)
        for group in db.values()
        for locations in group.values()
    )
def _get_group(name: str, db: "LocationDatabase") -> "Optional[GroupInfo]":
    """Return the group stored under ``name``, or None when absent."""
    return db.get(name)
def _add_location_to_db(location: "LocationInfo", db: "LocationDatabase") -> None:
    """Add a single location to a database.

    Locations are grouped by sanitized timezone group; within a group,
    each sanitized location name maps to a list (the same name can
    appear in several regions).
    """
    group_key = _sanitize_key(location.timezone_group)
    group = db.setdefault(group_key, {})
    location_key = _sanitize_key(location.name)
    group.setdefault(location_key, []).append(location)
def _indexable_to_locationinfo(idxable) -> LocationInfo:
    """Build a LocationInfo from an indexable of five strings:
    (name, region, timezone, latitude, longitude).

    Latitude/longitude are DMS strings (e.g. ``24°28'N``); the second
    argument to dms_to_float presumably caps the absolute value at
    90/180 degrees — confirm against astral.dms_to_float.
    """
    return LocationInfo(
        name=idxable[0],
        region=idxable[1],
        timezone=idxable[2],
        latitude=dms_to_float(idxable[3], 90.0),
        longitude=dms_to_float(idxable[4], 180.0),
    )
def _add_locations_from_str(location_string: str, db: "LocationDatabase") -> None:
    """Parse ``location_string`` (one comma-separated record per line)
    and add each record to ``db``.  Blank lines and lines starting with
    ``#`` are skipped.
    """
    for raw_line in location_string.split("\n"):
        record = raw_line.strip()
        if not record or record.startswith("#"):
            continue
        _add_location_to_db(_indexable_to_locationinfo(record.split(",")), db)
def _add_locations_from_list(
    location_list: List[Union[Tuple, str]], db: "LocationDatabase"
) -> None:
    """Add locations from a list whose entries are either CSV strings or
    lists/tuples of field strings.  Entries of any other type are
    silently ignored (matching the original behaviour).
    """
    for entry in location_list:
        if isinstance(entry, str):
            _add_locations_from_str(entry, db)
            continue
        if isinstance(entry, (list, tuple)):
            _add_location_to_db(_indexable_to_locationinfo(entry), db)
def add_locations(locations: Union[List, str], db: "LocationDatabase") -> None:
    """Add locations to the database.

    ``locations`` may be a string with one CSV record per line, or a
    list containing strings, lists, or tuples (lists and tuples are
    passed directly to the LocationInfo constructor).  Other types are
    silently ignored (matching the original behaviour).
    """
    if isinstance(locations, str):
        _add_locations_from_str(locations, db)
    elif isinstance(locations, (list, tuple)):
        _add_locations_from_list(locations, db)
def group(region: str, db: "LocationDatabase") -> "GroupInfo":
    """Access to each timezone group. For example London is in timezone
    group Europe.

    Lookups are case insensitive.

    Args:
        region: the name to look up

    Raises:
        KeyError: if the group is not found
    """
    # Key sanitisation inlined: lowercase, spaces -> underscores.
    key = str(region).lower().replace(" ", "_")
    if key in db:
        return db[key]
    raise KeyError(f"Unrecognised Group - {region}")
def lookup_in_group(location: str, group: Dict) -> "LocationInfo":
    """Look up a location within a group dictionary.

    An optional region may be supplied after a comma; when multiple
    locations share a name, the region disambiguates them, otherwise
    the first entry for the name is returned::

        location = group['Abu Dhabi,United Arab Emirates']

    Lookups are case insensitive.

    Args:
        location: The location to look up
        group: The location group to look in

    Raises:
        KeyError: if the location is not found
    """
    # Key sanitisation inlined: lowercase, spaces -> underscores.
    key = str(location).lower().replace(" ", "_")
    target_name, _, target_region = key.partition(",")
    target_name = target_name.strip("\"'")
    target_region = target_region.strip("\"'")
    for name, candidates in group.items():
        if name != target_name:
            continue
        if not target_region:
            return candidates[0]
        for candidate in candidates:
            if str(candidate.region).lower().replace(" ", "_") == target_region:
                return candidate
    raise KeyError(f"Unrecognised location name - {key}")
def lookup(name: str, db: "LocationDatabase") -> "Union[Dict, LocationInfo]":
    """Look up a name in a database.

    If a group exists with the given name, that group is returned;
    otherwise each group is searched for a location with the name.

    Args:
        name: The group/location name to look up
        db: The location database to look in

    Raises:
        KeyError: if the name is not found
    """
    key = _sanitize_key(name)
    # Loop variable renamed: the original shadowed the module-level
    # `group` function.
    for group_name, group_info in db.items():
        if group_name == key:
            return group_info
        try:
            return lookup_in_group(name, group_info)
        except KeyError:
            pass
    raise KeyError(f"Unrecognised name - {name}")
def all_locations(db: "LocationDatabase") -> "Generator[LocationInfo, None, None]":
    """Yield every :class:`~astral.LocationInfo` contained in the database,
    iterating groups, then names within each group, then each name's list.
    """
    for group_info in db.values():
        for locations in group_info.values():
            yield from locations
| {
"repo_name": "sffjunkie/astral",
"path": "src/astral/geocoder.py",
"copies": "2",
"size": "25932",
"license": "apache-2.0",
"hash": 805844747248731600,
"line_mean": 40.114379085,
"line_max": 98,
"alpha_frac": 0.7449328352,
"autogenerated": false,
"ratio": 1.9637867790525247,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.3708719614252525,
"avg_score": null,
"num_lines": null
} |
""" A StreamHandler that extracts messages from streams """
from __future__ import print_function
from collections import defaultdict
import sys
import traceback
from struct import unpack
from .thrift_message import ThriftMessage
class StreamContext(object):
    """Per-stream parsing state."""
    def __init__(self):
        # Raw bytes popped from the stream but not yet parsed.
        self.bytes = b''
class StreamHandler(object):
    """Extracts Thrift messages from TCP streams.

    Raw data is popped from each stream and buffered per-stream; the
    buffer is then brute-force scanned for a parseable Thrift message.
    Parsed messages are appended to ``outqueue`` as
    ``(timestamp, src, dst, msg)`` tuples.
    """
    def __init__(self,
                 outqueue,
                 protocol=None,
                 finagle_thrift=False,
                 max_message_size=1024*1000,
                 read_values=False,
                 debug=False,
                 framed=False):
        # Per-stream parsing state, created lazily on first sight.
        self._contexts_by_streams = defaultdict(StreamContext)
        self._pop_size = 1024  # TODO: what's a good value here?
        self._outqueue = outqueue
        self._protocol = protocol
        self._finagle_thrift = finagle_thrift
        self._max_message_size = max_message_size
        self._debug = debug
        self._framed = framed
        self._read_values = read_values
        self._seen_messages = 0
        self._recognized_streams = set()  # streams from which msgs have been read

    def __call__(self, *args, **kwargs):
        # Allows the handler object itself to be registered as a callback.
        self.handler(*args, **kwargs)

    @property
    def seen_streams(self):
        """Number of streams seen, whether or not any message was parsed."""
        return len(self._contexts_by_streams)

    @property
    def recognized_streams(self):
        """Number of streams from which at least one message was parsed."""
        return len(self._recognized_streams)

    @property
    def unrecognized_streams(self):
        return self.seen_streams - self.recognized_streams

    @property
    def pending_thrift_msgs(self):
        return len(self._outqueue)

    @property
    def seen_thrift_msgs(self):
        return self._seen_messages

    def handler(self, stream):
        """Consume newly-arrived data for ``stream`` and parse messages."""
        context = self._contexts_by_streams[stream]
        bytes, timestamp = stream.pop_data(self._pop_size)
        context.bytes += bytes

        # EMSGSIZE
        if len(context.bytes) >= self._max_message_size:
            if self._debug:
                print('Dropping bytes, dropped size: %d' % len(context.bytes))
            # Bug fix: reset to b'' (was ''). StreamContext.bytes is a
            # bytes buffer, so the next `context.bytes += bytes` would
            # raise TypeError (str + bytes) on Python 3.
            context.bytes = b''
            return

        # FIXME: a bit of brute force to find the start of a message.
        #        Is there a magic byte/string we can look for?
        start = 0
        if self._framed:
            # Bug fix: was `== 4`. With fewer than 4 buffered bytes the
            # unpack below would raise struct.error; wait for more data.
            if len(context.bytes) <= 4:  # frame size (or less), no frame yet
                return
            frame_size = unpack('!i', context.bytes[:4])[0]
            if len(context.bytes) < frame_size + 4:
                return  # incomplete frame; wait for more data
            start = 4

        view = memoryview(context.bytes)
        for idx in range(start, len(context.bytes)):
            try:
                data_slice = view[idx:]
                msg, msglen = ThriftMessage.read(
                    data_slice,
                    protocol=self._protocol,
                    finagle_thrift=self._finagle_thrift,
                    read_values=self._read_values)
            except EOFError:
                # Not enough data for a full message at this offset.
                continue
            except Exception as ex:
                if self._debug:
                    print('Bad message for stream %s: %s: %s\n(idx=%d) '
                          '(context size=%d)' % (
                              stream,
                              ex,
                              traceback.format_exc(),
                              idx,
                              len(context.bytes)),
                          file=sys.stderr
                          )
                continue

            self._recognized_streams.add(stream)
            self._seen_messages += 1
            self._outqueue.append((timestamp, stream.src, stream.dst, msg))
            # Discard everything up to and including the parsed message.
            context.bytes = context.bytes[idx + msglen:]
            break
| {
"repo_name": "pinterest/thrift-tools",
"path": "thrift_tools/stream_handler.py",
"copies": "1",
"size": "3659",
"license": "apache-2.0",
"hash": -1511092608695136500,
"line_mean": 29.7478991597,
"line_max": 82,
"alpha_frac": 0.5285597158,
"autogenerated": false,
"ratio": 4.473105134474328,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5501664850274328,
"avg_score": null,
"num_lines": null
} |
""" A StreamHandler that extracts messages from streams """
from __future__ import print_function
from collections import defaultdict
import sys
import traceback
from .thrift_message import ThriftMessage
class StreamContext(object):
    """Per-stream parsing state."""
    def __init__(self):
        # Buffer of raw data popped from the stream but not yet parsed.
        # NOTE(review): a str here (vs b'' in the newer variant) — looks
        # like Python 2-era code; confirm before running under Python 3.
        self.bytes = ''
class StreamHandler(object):
    """Extracts Thrift messages from TCP streams.

    Raw data is popped from each stream and buffered per-stream; the
    buffer is then brute-force scanned for a parseable Thrift message.
    Parsed messages are appended to `outqueue` as
    (timestamp, src, dst, msg) tuples.

    NOTE(review): this variant stores the buffer as a str and calls
    memoryview() on it, which only works on Python 2 — presumably
    Python 2-era code; confirm the target interpreter.
    """
    def __init__(self,
                 outqueue,
                 protocol=None,
                 finagle_thrift=False,
                 max_message_size=1024*1000,
                 read_values=False,
                 debug=False):
        # Per-stream parsing state, created lazily on first sight.
        self._contexts_by_streams = defaultdict(StreamContext)
        self._pop_size = 1024  # TODO: what's a good value here?
        self._outqueue = outqueue
        self._protocol = protocol
        self._finagle_thrift = finagle_thrift
        self._max_message_size = max_message_size
        self._debug = debug
        self._read_values = read_values
        self._seen_messages = 0
        self._recognized_streams = set()  # streams from which msgs have been read

    def __call__(self, *args, **kwargs):
        # Allows the handler object itself to be registered as a callback.
        self.handler(*args, **kwargs)

    @property
    def seen_streams(self):
        # Streams seen so far, whether or not any message was parsed.
        return len(self._contexts_by_streams)

    @property
    def recognized_streams(self):
        # Streams from which at least one message was parsed.
        return len(self._recognized_streams)

    @property
    def unrecognized_streams(self):
        return self.seen_streams - self.recognized_streams

    @property
    def pending_thrift_msgs(self):
        return len(self._outqueue)

    @property
    def seen_thrift_msgs(self):
        return self._seen_messages

    def handler(self, stream):
        """Consume newly-arrived data for `stream` and parse any messages."""
        context = self._contexts_by_streams[stream]
        bytes, timestamp = stream.pop_data(self._pop_size)
        context.bytes += bytes

        # EMSGSIZE
        # Drop the whole buffer once it exceeds the configured cap.
        if len(context.bytes) >= self._max_message_size:
            if self._debug:
                print('Dropping bytes, dropped size: %d' % len(context.bytes))
            context.bytes = ''
            return

        # FIXME: a bit of brute force to find the start of a message.
        #        Is there a magic byte/string we can look for?
        view = memoryview(context.bytes)
        for idx in range(0, len(context.bytes)):
            try:
                data_slice = view[idx:].tobytes()
                msg, msglen = ThriftMessage.read(
                    data_slice,
                    protocol=self._protocol,
                    finagle_thrift=self._finagle_thrift,
                    read_values=self._read_values)
            except EOFError:
                # Not enough data for a full message at this offset.
                continue
            except Exception as ex:
                if self._debug:
                    print('Bad message for stream %s: %s: %s\n(idx=%d) '
                          '(context size=%d)' % (
                              stream,
                              ex,
                              traceback.format_exc(),
                              idx,
                              len(context.bytes)),
                          file=sys.stderr
                          )
                continue

            self._recognized_streams.add(stream)
            self._seen_messages += 1
            self._outqueue.append((timestamp, stream.src, stream.dst, msg))
            # Discard everything up to and including the parsed message.
            context.bytes = context.bytes[idx + msglen:]
            break
| {
"repo_name": "shrijeet/thrift-tools",
"path": "thrift_tools/stream_handler.py",
"copies": "1",
"size": "3281",
"license": "apache-2.0",
"hash": 5208742186786485000,
"line_mean": 30.854368932,
"line_max": 82,
"alpha_frac": 0.5352026821,
"autogenerated": false,
"ratio": 4.488372093023256,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.00011697274535033338,
"num_lines": 103
} |
"""A streaming dataflow pipeline to count pub/sub messages.
"""
from __future__ import absolute_import
import argparse
import logging
from datetime import datetime
from past.builtins import unicode
import apache_beam as beam
import apache_beam.transforms.window as window
from apache_beam.examples.wordcount import WordExtractingDoFn
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
from apache_beam.options.pipeline_options import StandardOptions
from apache_beam.io import WriteToText
class CountFn(beam.CombineFn):
    """A CombineFn that counts the elements it receives."""

    def create_accumulator(self):
        """Start a new count at zero."""
        return 0

    def add_input(self, count, element):
        """Increment the running count by one for each element.

        The second parameter was renamed from ``input`` (which shadowed
        the builtin) to ``element``; Beam invokes this positionally.
        """
        return count + 1

    def merge_accumulators(self, accumulators):
        """Combine per-bundle counts into one."""
        return sum(accumulators)

    def extract_output(self, count):
        """The final output is the accumulated count itself."""
        return count
def run(argv=None):
    """Build and run the streaming message-count pipeline.

    Args:
        argv: command-line arguments (defaults to sys.argv when None).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--project',
        help=('Google Cloud Project ID'),
        required=True)
    parser.add_argument(
        '--input_topic',
        help=('Google Cloud PubSub topic name '),
        required=True)
    known_args, pipeline_args = parser.parse_known_args(argv)

    # Bug fix: list.append() returns None, so the original code passed
    # None to PipelineOptions and the appended --project flag was lost.
    pipeline_args.append('--project={}'.format(known_args.project))
    pipeline_options = PipelineOptions(pipeline_args)
    pipeline_options.view_as(SetupOptions).save_main_session = True
    pipeline_options.view_as(StandardOptions).streaming = True
    p = beam.Pipeline(options=pipeline_options)

    TOPIC = 'projects/{}/topics/{}'.format(known_args.project, known_args.input_topic)
    table_spec = '{}:taxifare.traffic_realtime'.format(known_args.project)  # table needs to exist

    def to_bq_format(count):
        """BigQuery writer requires rows to be stored as python dictionary"""
        return {'trips_last_5min': count,
                'time': datetime.now().strftime("%Y-%m-%d %H:%M:%S")}

    pipeline = (p
                | 'read_from_pubusub' >> beam.io.ReadFromPubSub(topic=TOPIC).with_output_types(bytes)
                | 'window' >> beam.WindowInto(window.SlidingWindows(size=300, period=15))
                | 'count' >> beam.CombineGlobally(CountFn()).without_defaults()
                | 'format_for_bq' >> beam.Map(to_bq_format)
                | 'write_to_bq' >> beam.io.WriteToBigQuery(
                    table_spec,
                    write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND,  # WRITE_TRUNCATE not supported for streaming
                    create_disposition=beam.io.BigQueryDisposition.CREATE_NEVER))

    result = p.run()
    # result.wait_until_finish()  # only do this if running with DirectRunner
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run() | {
"repo_name": "GoogleCloudPlatform/training-data-analyst",
"path": "courses/machine_learning/deepdive/04_advanced_preprocessing/taxicab_traffic/streaming_count.py",
"copies": "2",
"size": "2620",
"license": "apache-2.0",
"hash": 6457042269957237000,
"line_mean": 33.038961039,
"line_max": 115,
"alpha_frac": 0.7129770992,
"autogenerated": false,
"ratio": 3.7110481586402266,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.022958445213546688,
"num_lines": 77
} |
# a string consisting of characters that are valid identifiers in both
# Python 2 and Python 3
import string
valid_ident = string.ascii_letters + string.digits + "_"
def str_to_identifier(s):
    """Convert a ``bytes`` label to a valid (Python 2 and 3) identifier.

    Periods are dropped entirely (they mark abbreviations and look ugly
    as underscores); any other character outside [A-Za-z0-9_] becomes an
    underscore; a leading digit gets an underscore prefix.

    Parameters
    ----------
    s : bytes
        Raw label, e.g. a FAUST UI label.

    Returns
    -------
    str
        A valid identifier; empty input yields an empty string.
    """
    # convert str/bytes to unicode string
    s = s.decode()
    def filter_chars(s):
        for c in s:
            # periods are used for abbreviations and look ugly when converted
            # to underscore, so filter them out completely
            if c == ".":
                yield ""
            elif c in valid_ident:
                # "_" is already part of valid_ident, so no extra check needed
                yield c
            else:
                yield "_"
    # BUG FIX: guard against empty input, which previously raised IndexError
    if s and s[0] in string.digits:
        s = "_"+s
    return ''.join(filter_chars(s))
class Param(object):
    """A single FAUST UI input parameter.

    Reading ``zone`` returns the current value stored in the DSP's
    FAUSTFLOAT cell; assigning to ``zone`` clamps the value to
    [min, max] and snaps in-range values to the nearest multiple of
    ``step`` above ``min``.

    The class also implements ``__set__``, so that — in cooperation with
    ``Box.__setattr__`` — plain attribute assignment on the parent object
    is redirected to ``zone`` instead of rebinding the attribute.
    """
    def __init__(self, label, zone, init, min, max, step, param_type):
        """Initialise a Param object.

        Parameters:
        -----------
        label : str
            The full label as specified in the FAUST DSP file.
        zone : cffi.CData
            Points to the FAUSTFLOAT object inside the DSP C object.
        init : float
            The initialisation value.
        min : float
            The minimum allowed value.
        max : float
            The maximum allowed value.
        step : float
            The step size of the parameter.
        param_type : str
            The parameter type (e.g., HorizontalSlider)
        """
        self.label = label
        # _zone is a CData holding a float*; element 0 is the live value.
        self._zone = zone
        self._zone[0] = init
        self.min = min
        self.max = max
        self.step = step
        self.type = param_type
        # extra attributes
        self.default = init
        self.metadata = {}
        self.__doc__ = "min={0}, max={1}, step={2}".format(min, max, step)
    def _read_zone(self):
        # Current value held in the DSP's FAUSTFLOAT cell.
        return self._zone[0]
    def _write_zone(self, x):
        # Clamp out-of-range values; otherwise snap onto the step grid.
        if x >= self.max:
            clipped = self.max
        elif x <= self.min:
            clipped = self.min
        else:
            clipped = self.min + round((x - self.min) / self.step) * self.step
        self._zone[0] = clipped
    zone = property(fget=_read_zone, fset=_write_zone,
                    doc="Pointer to the value of the parameter.")
    def __set__(self, obj, value):
        # Redirect attribute assignment on the parent to the zone setter.
        self.zone = value
class Box(object):
    """A labelled group of UI elements.

    Attribute assignment is special-cased: when an existing attribute
    value defines ``__set__`` (e.g. a Param), the assignment is routed
    through that descriptor instead of rebinding the attribute.
    """
    def __init__(self, label, layout):
        self.label = label
        self.layout = layout
        self.metadata = {}
    def __setattr__(self, name, value):
        current = self.__dict__.get(name)
        if current is not None and hasattr(current, "__set__"):
            # Data-descriptor-like value already stored: delegate to it.
            current.__set__(self, value)
        else:
            object.__setattr__(self, name, value)
# TODO: implement the *Display() and *Bargraph() methods
class PythonUI(object):
    """
    Maps the UI elements of a FAUST DSP to attributes of another object,
    specifically a FAUST wrapper object.
    In FAUST, UI's are specified by the DSP object, which calls methods of a UI
    object to create them. The PythonUI class implements such a UI object. It
    creates C callbacks to its methods and stores then in a UI struct, which
    can then be passed to the buildUserInterface() function of a FAUST DSP
    object.
    The DSP object basically calls the methods of the PythonUI class from C via
    the callbacks in the UI struct and thus creates a hierarchical namespace of
    attributes which map back to the DSP's UI elements.
    Notes:
    ------
    Box and Param attributes are prefixed with "b_" and "p_", respectively, in
    order to differentiate them from each other and from regular attributes.
    Boxes and parameters without a label are given a default name of "anon<N>",
    where N is an integer (e.g., "p_anon1" for a label-less parameter).
    See also:
    ---------
    FAUSTPy.Param - wraps the UI input parameters.
    """
    def __init__(self, ffi, obj=None):
        """
        Initialise a PythonUI object.
        Parameters:
        -----------
        ffi : cffi.FFI
            The CFFI instance that holds all the data type declarations.
        obj : object (optional)
            The Python object to which the UI elements are to be added. If
            None (the default) the PythonUI instance manipulates itself.
        """
        # __boxes is a stack of currently open boxes; the bottom element is
        # the object that ultimately receives the UI attributes.
        if obj:
            self.__boxes = [obj]
        else:
            self.__boxes = [self]
        # Per-open-box counters used to generate "anon<N>" names for
        # label-less boxes/parameters.
        self.__num_anon_boxes = [0]
        self.__num_anon_params = [0]
        # Per-open-box {zone -> {key: value}} metadata gathered by declare().
        self.__metadata = [{}]
        # Group-level metadata declared just before the next openBox() call.
        self.__group_metadata = {}
        # define C callbacks that know the global PythonUI object
        @ffi.callback("void(void*, FAUSTFLOAT*, char*, char*)")
        def declare(mInterface, zone, key, value):
            self.declare(zone, ffi.string(key), ffi.string(value))
        @ffi.callback("void(void*, char*)")
        def openVerticalBox(mInterface, label):
            self.openVerticalBox(ffi.string(label))
        @ffi.callback("void(void*, char*)")
        def openHorizontalBox(mInterface, label):
            self.openHorizontalBox(ffi.string(label))
        @ffi.callback("void(void*, char*)")
        def openTabBox(mInterface, label):
            self.openTabBox(ffi.string(label))
        @ffi.callback("void(void*)")
        def closeBox(mInterface):
            self.closeBox()
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT)")
        def addHorizontalSlider(ignore, c_label, zone, init, min, max, step):
            label = ffi.string(c_label)
            self.addHorizontalSlider(label, zone, init, min, max, step)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT)")
        def addVerticalSlider(ignore, c_label, zone, init, min, max, step):
            label = ffi.string(c_label)
            self.addVerticalSlider(label, zone, init, min, max, step)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT, FAUSTFLOAT)")
        def addNumEntry(ignore, c_label, zone, init, min, max, step):
            label = ffi.string(c_label)
            self.addNumEntry(label, zone, init, min, max, step)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*)")
        def addButton(ignore, c_label, zone):
            self.addButton(ffi.string(c_label), zone)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*)")
        def addToggleButton(ignore, c_label, zone):
            self.addToggleButton(ffi.string(c_label), zone)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*)")
        def addCheckButton(ignore, c_label, zone):
            self.addCheckButton(ffi.string(c_label), zone)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, int)")
        def addNumDisplay(ignore, c_label, zone, p):
            self.addNumDisplay(ffi.string(c_label), zone, p)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, char*[], FAUSTFLOAT, FAUSTFLOAT)")
        def addTextDisplay(ignore, c_label, zone, names, min, max):
            self.addTextDisplay(ffi.string(c_label), zone, names, min, max)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, FAUSTFLOAT, FAUSTFLOAT)")
        def addHorizontalBargraph(ignore, c_label, zone, min, max):
            label = ffi.string(c_label)
            self.addHorizontalBargraph(label, zone, min, max)
        @ffi.callback("void(void*, char*, FAUSTFLOAT*, FAUSTFLOAT, FAUSTFLOAT)")
        def addVerticalBargraph(ignore, c_label, zone, min, max):
            label = ffi.string(c_label)
            self.addVerticalBargraph(label, zone, min, max)
        # create a UI object and store the above callbacks as it's function
        # pointers; also store the above functions in self so that they don't
        # get garbage collected
        ui = ffi.new("UIGlue*")
        ui.declare = self.__declare_c = declare
        ui.openVerticalBox = self.__openVerticalBox_c = openVerticalBox
        ui.openHorizontalBox = self.__openHorizontalBox_c = openHorizontalBox
        ui.openTabBox = self.__openTabBox_c = openTabBox
        ui.closeBox = self.__closeBox_c = closeBox
        ui.addHorizontalSlider = self.__addHorizontalSlider_c = addHorizontalSlider
        ui.addVerticalSlider = self.__addVerticalSlider_c = addVerticalSlider
        ui.addNumEntry = self.__addNumEntry_c = addNumEntry
        ui.addButton = self.__addButton_c = addButton
        ui.addToggleButton = self.__addToggleButton_c = addToggleButton
        ui.addCheckButton = self.__addCheckButton_c = addCheckButton
        ui.addNumDisplay = self.__addNumDisplay_c = addNumDisplay
        ui.addTextDisplay = self.__addTextDisplay_c = addTextDisplay
        ui.addHorizontalBargraph = self.__addHorizontalBargraph_c = addHorizontalBargraph
        ui.addVerticalBargraph = self.__addVerticalBargraph_c = addVerticalBargraph
        ui.uiInterface = ffi.NULL # we don't use this anyway
        self.__ui = ui
        self.__ffi = ffi
    ui = property(fget=lambda x: x.__ui,
                  doc="The UI struct that calls back to its parent object.")
    def declare(self, zone, key, value):
        # A NULL zone marks group-level (box) metadata; any real zone pointer
        # identifies a parameter.
        if zone == self.__ffi.NULL:
            # set group meta-data
            #
            # the group meta-data is stored temporarily here and is set during
            # the next openBox()
            self.__group_metadata[key] = value
        else:
            # store parameter meta-data
            #
            # since the only identifier we get is the zone (pointer to the
            # control value), we have to store this for now and assign it to
            # the corresponding parameter later in closeBox()
            if zone not in self.__metadata[-1]:
                self.__metadata[-1][zone] = {}
            self.__metadata[-1][zone][key] = value
    ##########################
    # stuff to do with boxes
    ##########################
    def openBox(self, label, layout):
        # Open a new sub-box with the given layout ("vertical", "horizontal"
        # or "tab") and push it on the box stack.
        #
        # If the label is an empty string, don't do anything, just stay in the
        # current Box
        if label:
            # special case the first box, which is always "0x00" (the ASCII
            # Null character), so that it has a consistent name
            if label.decode() == '0x00':
                sane_label = "ui"
            else:
                sane_label = "b_"+str_to_identifier(label)
        else:
            # if the label is empty, create a default label
            self.__num_anon_boxes[-1] += 1
            sane_label = "b_anon" + str(self.__num_anon_boxes[-1])
        # create a new sub-Box and make it a child of the current Box
        box = Box(label, layout)
        setattr(self.__boxes[-1], sane_label, box)
        self.__boxes.append(box)
        # store the group meta-data in the newly opened box and reset
        # self.__group_metadata
        self.__boxes[-1].metadata.update(self.__group_metadata)
        self.__group_metadata = {}
        # Fresh counters/metadata scope for the newly opened box.
        self.__num_anon_boxes.append(0)
        self.__num_anon_params.append(0)
        self.__metadata.append({})
    def openVerticalBox(self, label):
        self.openBox(label, "vertical")
    def openHorizontalBox(self, label):
        self.openBox(label, "horizontal")
    def openTabBox(self, label):
        self.openBox(label, "tab")
    def closeBox(self):
        # Pop the metadata scope of the closing box and attach the collected
        # per-zone metadata to the matching Param objects.
        cur_metadata = self.__metadata.pop()
        # iterate over the objects in the current box and assign the meta-data
        # to the correct parameters
        for p in self.__boxes[-1].__dict__.values():
            # TODO: add the Display class (or whatever it will be called) to
            # this list once *Display and *Bargraph are implemented
            if type(p) not in (Param,):
                continue
            # iterate over the meta-data that has accumulated in the current
            # box and assign it to its corresponding Param objects
            for zone, mdata in cur_metadata.items():
                if p._zone == zone:
                    p.metadata.update(mdata)
        self.__num_anon_boxes.pop()
        self.__num_anon_params.pop()
        # now pop the box off the stack
        self.__boxes.pop()
    ##########################
    # stuff to do with inputs
    ##########################
    def add_input(self, label, zone, init, min, max, step, param_type):
        # Create a Param and attach it to the currently open box with a
        # "p_"-prefixed, sanitised attribute name.
        if label:
            sane_label = str_to_identifier(label)
        else:
            # if the label is empty, create a default label
            self.__num_anon_params[-1] += 1
            sane_label = "anon" + str(self.__num_anon_params[-1])
        setattr(self.__boxes[-1], "p_"+sane_label,
                Param(label, zone, init, min, max, step, param_type))
    def addHorizontalSlider(self, label, zone, init, min, max, step):
        self.add_input(label, zone, init, min, max, step, "HorizontalSlider")
    def addVerticalSlider(self, label, zone, init, min, max, step):
        self.add_input(label, zone, init, min, max, step, "VerticalSlider")
    def addNumEntry(self, label, zone, init, min, max, step):
        self.add_input(label, zone, init, min, max, step, "NumEntry")
    def addButton(self, label, zone):
        # Buttons are modelled as 0/1 parameters with unit step.
        self.add_input(label, zone, 0, 0, 1, 1, "Button")
    def addToggleButton(self, label, zone):
        self.add_input(label, zone, 0, 0, 1, 1, "ToggleButton")
    def addCheckButton(self, label, zone):
        self.add_input(label, zone, 0, 0, 1, 1, "CheckButton")
    def addNumDisplay(self, label, zone, p):
        # Output widgets are not implemented yet (see TODO above the class).
        pass
    def addTextDisplay(self, label, zone, names, min, max):
        pass
    def addHorizontalBargraph(self, label, zone, min, max):
        pass
    def addVerticalBargraph(self, label, zone, min, max):
        pass
| {
"repo_name": "marcecj/faust_python",
"path": "FAUSTPy/python_ui.py",
"copies": "1",
"size": "14040",
"license": "mit",
"hash": -5792233573113185000,
"line_mean": 34.5443037975,
"line_max": 104,
"alpha_frac": 0.5908831909,
"autogenerated": false,
"ratio": 3.9239798770262717,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0002513785636148505,
"num_lines": 395
} |
####elise@fnal.gov ##########
import numpy as np
import scipy.stats
import sys
from sklearn.covariance import GraphLassoCV, ledoit_wolf
#import os
#sys.path.append(os.getcwd())
import six
from .myutils import *
from .tolerance import *
from .variance import *
from .priors import *
from .model import *
from .io_utils import *
from .setup_mpi_mp import *
try:
from mpi4py import MPI
MPI = MPI
except ImportError:
MPI = None
def wrapper_func(i):
    '''Dispatch a particle step, routing through the simulation worker pool
    when the sampler runs with a split MPI communicator.
    Input:
        i: tuple (t = iteration number, Pid = particle id, weights, theta, variance, tol)
    Output:
        output from step() function
    '''
    # step() defaults sim_pool to None, so passing None is equivalent to
    # the plain step(i) call.
    pool = abcsampler.parallel.sim_pool if abcsampler.mpi_splitcomm else None
    return step(i, pool)
def step(info_in,sim_pool=None):
    '''
    Function for a single particle to propose a new point in parameter space
    and accept it if rho(model(theta),data)<tol
    Input:
        info_in: tuple (t = iteration number, Pid = particle id,
            wgt = particle weight history, theta = parameter history,
            variance = perturbation covariance (per-particle list when
            variance_method == 4), tol = tolerance level per iteration)
        sim_pool: optional simulation worker pool, passed to the model when
            the sampler uses a split MPI communicator
    Returns:
        trial_t - array of new parameters \theta
        rho - distance rho(x,y)
    '''
    t, Pid,wgt,theta,variance,tol = info_in
    tm1=t-1
    # Rejection loop: keep proposing until the simulated data set falls
    # within the tolerance for this iteration.
    while True:
        if t ==0: #draw from prior
            trial_t = [call_prior() for call_prior in abcsampler.prior]
            if abcsampler.mpi_splitcomm:
                x = abcsampler.model(trial_t,sim_pool)
            else:
                x = abcsampler.model(trial_t)
            rho = abcsampler.dist(x)
        else:
            # Re-seed so forked/spawned workers do not share RNG state.
            np.random.seed()
            # Resample a particle from the previous iteration by weight ...
            rpart = int(np.random.choice(abcsampler.npart,size=1,p=wgt[tm1]))
            t_old = theta[tm1][rpart]
            if abcsampler.variance_method == 4:
                # k-NN variance: one covariance matrix per particle.
                covariance = variance[Pid]
            else:
                covariance = variance
            # ... and perturb it with a multivariate normal kernel.
            trial_t = np.atleast_1d(scipy.stats.multivariate_normal.rvs(mean= t_old,cov=covariance,size=1))
            if abcsampler.mpi_splitcomm:
                x = abcsampler.model(trial_t,sim_pool)
            else:
                x = abcsampler.model(trial_t)
            rho = abcsampler.dist(x)
        if rho <= tol[t]:
            break
    return trial_t,rho
class ABC_class(object):
    '''Approximate Bayesian Computation Sequentual Monte Carlo class'''
    def __init__(self,nparam,npart,data,tlevels,niter,priors,**kwargs):
        '''
        Input:
            nparam: number of parameters to vary in sampler
            npart: number of particles/walkers for Sequential Monte Carlo sampler
            data: input data vector
            tlevels: [max,min] tolerance levels max and min values
            niter: number of iteration levels
            priors: list of tuples (priorname, [hyperparameters for prior])
            kwargs: dictionary of key words ; defaults given below
            tol_type: tolerance level setting. Can be "linear","log", "exp", "const".
            verbose: 0 = no output to screen, 1 = print out to screen
            adapt_t: Boolean True/False for adaptive threshold setting
            threshold: quantile level if adapt_t == True.
            pert_kernel: 1 = component wise perturbation with local diag variance; 2 = multivariate perturbation based on local covariance
            variance_method: 0=Weighted covariance, 1=Filippi et al 2012, eq 12 & 13, 2=Simple variance estimate (Turner & Van Zandt 2012), 3=Leodoit_Wolf, 4=k-nn
            k_near: int, number of nearest neighbours for local covariance
            dist_type: string, distance metric method. Default is "user"
            dfunc: method name when dist_type == user"
            datacov: string, data covariance file if needed in distance metric
            outfile: string, name of output file
            mpi: Boolean, True/False
            mpi_splitcomm: Boolean, True/False
            num_abc: int, number of procsessors for abc particles if split_mpicomm == True
            mp: Boolean, True/False
            num_proc: int, if mp=True
            restart: string, name of restart file
            from_restart: Boolean, True/False
        '''
        prop_defaults={"tol_type":"exp","verbose":0,'adapt_t':False,
        'threshold':75,'pert_kernel':1,'variance_method':0,'k_near':5,'dist_type': "user",
        'dfunc':None,'datacov':None,'outfile':'abc_out.txt','mpi': None, 'mp':None,'num_proc':None,'mpi_splitcomm':False,
        'num_abc':None,'restart':None,'from_restart':False}
        # Merge user kwargs over the defaults onto the instance.
        for (prop, default) in six.iteritems(prop_defaults):
            setattr(self, prop, kwargs.get(prop, default))
        if self.from_restart:
            backup_files(self.outfile)
        # step()/wrapper_func() (module-level, for pickling with mp/mpi)
        # access the sampler through this global.
        global abcsampler
        abcsampler = self
        #setup mpi or mp
        self.parallel = Parallel(self.mpi, self.mp,self.mpi_splitcomm, self.num_proc,self.num_abc,self.verbose)
        check_input(nparam,npart,priors,tlevels[0],tlevels[1],self.dist_type,self.datacov,self.dfunc)
        self.data = data
        self.npart = npart
        self.nparam = nparam
        self.niter = niter
        self.tmin = tlevels[1]
        self.allocate()
        self.tol = Tolerance(self.tol_type,tlevels[1],tlevels[0],niter).tol
        # Select the perturbation-kernel covariance estimator.
        if self.variance_method ==1:
            self.Variance = Filippi(nparam,npart,self.pert_kernel)
        elif self.variance_method ==2:
            self.Variance = TVZ(nparam,self.pert_kernel)
        elif self.variance_method ==3:
            self.Variance = Leodoit_Wolf(nparam,self.pert_kernel)
        elif self.variance_method == 4:
            self.Variance = nearest_neighbour(nparam,npart,self.pert_kernel)
        else: #default
            self.Variance = weighted_cov(nparam,npart,self.pert_kernel)
        self.unpack_priors(priors)
        self.end_sampling = False
        if self.verbose and self.parallel.master==0:
            print_header(npart,niter,self.tol_type,tlevels,priors)
    def unpack_priors(self,priors):
        '''
        Parameters of interest, theta, may have different priors from Priors class
        input:
            priors = list of ['name of prior', [hyperparams of prior]] for each t in theta,
            currently, gamma, uniform and normal supported.
        returns:
            methods from instances of Prior class which allow sample to generate rvs
            using self.prior() and pdf(number) using self.priorprob(number)
        '''
        # BUG FIX: the deprecated alias np.object was removed in NumPy 1.24
        # and raised AttributeError; the builtin `object` is the exact
        # equivalent dtype.
        self.prior = np.empty(self.nparam,dtype=object)
        self.priorprob = np.empty(self.nparam,dtype=object)
        for i,p in enumerate(priors):
            pcls = Prior_class(p[0],p[1])
            self.prior[i] = np.vectorize(pcls.prior)
            self.priorprob[i]=np.vectorize(pcls.return_priorprob)
    def allocate(self):
        '''allocate numpy arrays for parameter values, weights and distances'''
        self.theta=np.zeros([self.niter,self.npart,self.nparam])
        self.wgt=np.zeros([self.niter,self.npart])
        self.Delta=np.zeros([self.niter,self.npart])
    def dist(self,x):
        '''distance metric function
        Input:
            x - simulationed data sample at proposed parameter value
        Returns:
            distance to be compared to the threshold at iteration t
        '''
        if self.dist_type == "mean":
            return np.sum(np.abs(np.mean(x,axis=0) - np.mean(self.data,axis=0)))
        if self.dist_type == "chi2":
            d=x-self.data
            return np.dot(d,np.dot(self.datacov,d))
        if self.dist_type == "user":
            return self.dfunc(self.data,x)
    def sample(self,model_simulator):
        '''
        Begin sampling
        Input:
            model_simulator - func which simulates data
        '''
        self.model=model_simulator
        #if self.parallel.rank ==0:print "\t Running sampler...\t "
        if self.from_restart:
            # Resume from the saved state and continue with the next level.
            t,th,wgt,dist=read_restart_files(self.restart,self.nparam,self.npart)
            self.theta[t] = th ; self.wgt[t]=wgt; self.Delta[t] = dist
            if self.adapt_t and t <self.niter-1:
                self.tol[t+1]=self.iteratively_adapt(t)
            ctr = t+1
        else:
            ctr=0
        while self.end_sampling == False:
            if self.mpi_splitcomm:
                if self.parallel.rank in self.parallel.abc_ranks:
                    ctr = self.sample_loop(ctr)
                else: #worker node for sim, wait until sim pool is closed
                    self.parallel.sim_pool.worker()
                    self.end_sampling = True
            else:
                ctr = self.sample_loop(ctr)
        # Shut the pools down once sampling is finished.
        if self.mpi or self.mp:
            if self.mpi_splitcomm:
                if self.parallel.rank in self.parallel.abc_ranks:
                    self.parallel.pool.close()
                if self.parallel.rank !=0:
                    self.parallel.sim_pool.close()
            else:
                self.parallel.pool.close()
    def sample_loop(self,t):
        '''
        At each iteration t:
            -each particle finds a point in parameter space that satifies rho(model(theta),data)<tol
            -weights are calculated for each particle
            -variances of parameters calculated
            -weights are normalized
        input:
            iter t
        returns:
            t+1, the next iteration number
        '''
        if t+1 == self.niter or self.tol[t] == self.tmin:
            self.end_sampling = True
        if not(t):
            # First iteration: uniform weights.
            self.wgt[t] =1./self.npart
        if t:
            self.variance = self.calculate_variance(t)
        else:
            self.variance =0
        if self.mpi or self.mp:
            if self.mp:
                pool_outputs = self.parallel.pool.map(self.classstep, zip([t]*(self.npart),range(self.npart)))
            else:
                pool_outputs = self.parallel.pool.map(wrapper_func, zip([t]*(self.npart),\
                range(self.npart),[self.wgt]*(self.npart),\
                [self.theta]*(self.npart),[self.variance]*(self.npart), [self.tol]*(self.npart)))
            for i in range(self.npart):
                if pool_outputs: # prevent error when mpi worker pool is closed
                    self.theta[t][i],self.Delta[t][i] = pool_outputs[i]
                else:
                    self.end_sampling = True
                    return #pool is closed so worker just returns
            if t:
                self.wgt[t] = self.parallel.pool.map(self.particle_weight, zip([t]*(self.npart),range(self.npart)))
        else:
            # Serial fallback: advance particles one by one.
            for i in np.arange(self.npart):
                self.theta[t][i],self.Delta[t][i] = step((t,i, self.wgt, self.theta, self.variance, self.tol))
                if t:
                    self.wgt[t][i] = self.particle_weight((t,i))
        #normalize
        self.wgt[t] = self.wgt[t]/np.sum(self.wgt[t])
        if self.outfile and self.parallel.master==0:
            write_to_file(t,self.outfile,self.nparam,self.npart,self.theta[t],self.Delta[t],self.wgt[t])
            if self.restart and t:
                write_restart_file(self.restart,t,self.theta[t],self.wgt[t],self.Delta[t],self.nparam,self.npart)
        sys.stdout.flush()
        if self.verbose and self.parallel.master==0:
            print("\t Step:",t,"\t tol:",self.tol[t],"\t Params:",[np.mean(self.theta[t][:,ii]) for ii in range(self.nparam)])
        if self.adapt_t and t <self.niter-1:
            self.tol[t+1]=self.iteratively_adapt(t)
        return t+1
    def calculate_variance(self,t):
        # Delegate to the estimator configured in __init__; the estimators
        # take different argument sets, hence the dispatch.
        if self.variance_method ==1:
            return self.Variance.get_var(t,self.theta[t-1],self.Delta[t-1],self.tol[t-1],self.wgt[t-1])
        elif self.variance_method ==2 or self.variance_method ==3:
            return self.Variance.get_var(t,self.theta[t-1])
        elif self.variance_method == 4:
            return self.Variance.get_var(t,self.theta[t-1],self.wgt[t-1],self.k_near)
        else:
            return self.Variance.get_var(t,self.theta[t-1],self.wgt[t-1])
    def iteratively_adapt(self,t):
        '''
        Drovandi & Pettitt 2011, use qth quantileof the distance for t iterations
        unless we hit the tmin requested
        '''
        new_tol= np.percentile(self.Delta[t], self.threshold)
        if new_tol < self.tmin:
            new_tol = self.tmin
        return new_tol
    def particle_weight(self,tup_in):
        '''
        At each iteraction, t, this method calculates the weight of particle i
        at t not equal to 0 weight is calculated according to kernel.
        input:
            tup_in is a tuple of (iter t,particle id Pid)
        '''
        t, Pid = tup_in
        tm1 = t-1
        # apply kernel to each particle's parameter vector
        Kf = self.kernel(Pid,t)
        kernels = Kf(self.theta[t-1])
        # NOTE(review): `np.any(self.wgt[tm1]) ==0` is True only when ALL
        # previous weights are zero; if the intent was "any weight is zero"
        # this should read np.any(self.wgt[tm1]==0) — confirm before changing.
        if np.any(self.wgt[tm1]) ==0 or np.any(kernels)==0:
            print("Error computing Kernel or weights...", kernels, self.wgt[tm1])
            sys.exit(1)
        priorproduct = np.prod([f[0](f[1]) for f in np.vstack((self.priorprob,self.theta[t][Pid])).T])
        return priorproduct/(np.sum(self.wgt[tm1]*kernels))
    def kernel(self,Pid,t):
        if self.variance_method == 4:
            covariance = self.variance[Pid]
        else:
            covariance = self.variance
        if np.linalg.det(covariance) <1.E-15:
            #maybe singular matrix; check diagonals for small values
            #if self.verbose:
            #    print "Variance is a singular matrix", covariance
            #    print "using l2 shrinkage with the Ledoit-Wolf estimator..."
            covariance, _ = ledoit_wolf(self.theta[t])
        return scipy.stats.multivariate_normal(mean=self.theta[t][Pid],cov=covariance,allow_singular=True).pdf
    def classstep(self,info_in):
        '''
        if mp==True.
        copy of step function above; messy but necessary for mp which can't access global abcsampler -EJ
        '''
        t, Pid = info_in
        tm1=t-1
        # Rejection loop, same structure as module-level step().
        while True:
            if t ==0: #draw from prior
                trial_t = [call_prior() for call_prior in self.prior]
                x = self.model(trial_t)
                rho = self.dist(x)
            else:
                np.random.seed()
                rpart = int(np.random.choice(self.npart,size=1,p=self.wgt[tm1]))
                t_old = self.theta[tm1][rpart]
                if self.variance_method == 4:
                    covariance = self.variance[Pid]
                else:
                    covariance = self.variance
                trial_t = np.atleast_1d(scipy.stats.multivariate_normal.rvs(mean= t_old,cov=covariance,size=1))
                x = self.model(trial_t)
                rho = self.dist(x)
            if rho <= self.tol[t]:
                break
        return trial_t,rho
| {
"repo_name": "EliseJ/astroABC",
"path": "astroabc/abc_class.py",
"copies": "1",
"size": "18205",
"license": "mit",
"hash": 8122778185513432000,
"line_mean": 47.1613756614,
"line_max": 174,
"alpha_frac": 0.4697610547,
"autogenerated": false,
"ratio": 4.426209579382446,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.015443159250793323,
"num_lines": 378
} |
#!/astro/apps/pkg/python/bin/python
import numpy
import pyfits
import pylab
import os
class SDSSfits:
    """Loader/container for an SDSS spSpec FITS spectrum (Python 2 code).

    Holds the flux array plus selected header keywords in ``self.D`` and
    exposes wavelength-grid helpers.  The wavelength grid is log10-linear:
    log10(lambda_i) = coeff0 + i*coeff1.
    """
    def __init__(self,SDSS_fitfile = None,LINES=[]):
        # NOTE: the mutable default LINES=[] is only iterated, never
        # mutated, so the usual shared-default pitfall does not bite here.
        self.spectrum = None
        self.D = {}
        self.z = 0
        self.coeff0 = 0
        self.coeff1 = 0
        self.name = ""
        if SDSS_fitfile != None:
            hdulist = pyfits.open(SDSS_fitfile)
            # HDU 0: flux in row 0; row 2 is presumably a mask/inverse
            # variance array (zero == bad pixel) — TODO confirm against the
            # spSpec data model.
            self.spectrum = hdulist[0].data[0]
            self.name = hdulist[0].header['NAME']
            self.D['N_BAD_PIX'] = sum(hdulist[0].data[2]==0)
            for key in ('Z','Z_ERR','COEFF0','COEFF1',
                        'SPEC_CLN','MAG_G','MAG_R','MAG_I'):
                self.D[key] = hdulist[0].header[key]
            self.D['TARGET'] = SDSS_fitfile
            #get line data
            # Map requested rest wavelengths ('%.2f' strings) to their row
            # index in HDU 2; -1 means "line not found".
            hdu_line_list = dict([(line,-1) for line in LINES])
            if hdulist[2].data == None:
                self.numlines = 0
            else:
                self.numlines = hdulist[2].data.shape[0]
            for i in range(self.numlines):
                line_w = hdulist[2].data.field('restWave')[i]
                line_str = '%.2f' % line_w
                if line_str in LINES:
                    hdu_line_list[line_str] = i
            for line in LINES:
                i = hdu_line_list[line]
                #compute flux and dflux, get nsigma
                if i<0:
                    # line not present: store zeros
                    sig = 0
                    dsig = 0
                    height = 0
                    dheight = 0
                    nsigma = 0
                else:
                    sig = hdulist[2].data.field('sigma')[i]
                    dsig = hdulist[2].data.field('sigmaErr')[i]
                    height = hdulist[2].data.field('height')[i]
                    dheight = hdulist[2].data.field('heightErr')[i]
                    nsigma = hdulist[2].data.field('nsigma')[i]
                    # -9999 is the SDSS sentinel for "no measurement"
                    if sig==-9999: sig=0
                    if height==-9999: height=0
                    if nsigma==-9999: nsigma=0
                # flux = width*height of the Gaussian fit; dflux by linear
                # error propagation of the product
                self.D[line+'_flux'] = sig*height
                self.D[line+'_dflux'] = sig*dheight + dsig*height
                self.D[line+'_width'] = sig
                self.D[line+'_dwidth'] = dsig
                self.D[line+'_nsigma'] = nsigma
            self.z = self.D['Z']
            self.coeff0 = self.D['COEFF0']
            self.coeff1 = self.D['COEFF1']
            hdulist.close()
    def plot(self,*args,**kwargs):
        # Plot flux against wavelength, forwarding style args to pylab.
        pylab.plot(self.wavelength(),self.spectrum,*args,**kwargs)
    def log_w_min(self,i=None):
        """
        if i is specified, return log_w_min of bin i
        otherwise, return log_w_min of the spectrum
        """
        if i==None: i=0
        return self.coeff0 + (i-0.5)*self.coeff1
    def log_w_max(self,i=None):
        """
        if i is specified, return log_w_max of bin i
        otherwise, return log_max of the spectrum
        """
        if i==None: i=len(self)-1
        return self.coeff0 + (i+0.5)*self.coeff1
    def w_min(self,i=None):
        # Linear-wavelength lower edge (Angstroms).
        return 10**self.log_w_min(i)
    def w_max(self,i=None):
        # Linear-wavelength upper edge (Angstroms).
        return 10**self.log_w_max(i)
    def wavelength(self):
        # Bin-centre wavelengths for every pixel of the spectrum.
        return 10** (self.coeff0+self.coeff1*numpy.arange(len(self)))
    def __len__(self):
        return len(self.spectrum)
    def remove_O_lines(self):
        """
        removes strong Oxygen line at 5577A, 6300A, and 6365A.
        This basically does a linear interpolation across those
        regions
        """
        for line in (5577,6300,6365):
            # interpolate across a +/- 10 A window around the line
            lmin = line-10
            lmax = line+10
            imin = int( numpy.floor( (numpy.log10(lmin)-self.coeff0)/self.coeff1 ) )
            imax = int( numpy.ceil( (numpy.log10(lmax)-self.coeff0)/self.coeff1 ) )
            if imin<0 or imax>=len(self.spectrum):
                print "warning: line %i out of range\n" % line
            s0 = self.spectrum[imin]
            ds_di = (self.spectrum[imax] - self.spectrum[imin])/(imax-imin)
            for i in range(imin+1,imax):
                self.spectrum[i] = s0 + (i-imin)*ds_di
    def move_to_restframe(self):
        # De-redshift the wavelength solution; a log-wavelength shift of
        # -log10(1+z) is a division by (1+z) in linear wavelength.
        if (self.z < 0):
            print "warning: negative redshift! Can't move to rest frame"
        self.coeff0 -= numpy.log10(1+self.z)
        self.z = 0.0
    def rebin(self,rebin_coeff0,rebin_coeff1,rebin_length):
        # Flux-conserving rebin onto a new log-linear grid; returns a new
        # object, the original is left untouched.
        snew = self.__class__()
        snew.spectrum = numpy.zeros(rebin_length)
        snew.z = self.z
        snew.coeff0 = rebin_coeff0
        snew.coeff1 = rebin_coeff1
        snew.D = self.D.copy()
        # the new grid must lie entirely inside the old one
        if ( self.log_w_min() > snew.log_w_min()+1E-10 ):
            raise ValueError,"rebin: new_min preceeds old_min"
        if ( self.log_w_max() < snew.log_w_max()-1E-10 ):
            raise ValueError,"rebin: new_max exceeds old_max"
        for i in range(len(snew)):
            log_wi_new = snew.coeff0 + i*snew.coeff1
            log_w_min_i = snew.log_w_min(i)
            log_w_max_i = snew.log_w_max(i)
            #integrate old flux within this range
            binmin_j = numpy.floor((log_w_min_i - self.coeff0)/self.coeff1+ 0.5)
            binmax_j = numpy.ceil ((log_w_max_i - self.coeff0)/self.coeff1+ 0.5)
            # tolerate one-bin overhang caused by the 1E-10 slack above
            if binmin_j == -1:
                binmin_j = 0
            elif binmin_j < -1:
                raise ValueError, "error: binmin_j too small"
            if binmax_j == len(self)+1:
                binmax_j = len(self)
            elif binmax_j > len(self)+1:
                raise ValueError, "error: binmax_j too large"
            snew.spectrum[i]=0
            for j in range( int(binmin_j), int(binmax_j) ):
                # overlap of old bin j with new bin i, in log wavelength
                log_w_min_j = max(log_w_min_i, self.log_w_min(j) )
                log_w_max_j = min(log_w_max_i, self.log_w_max(j) )
                snew.spectrum[i] += self.spectrum[j] * \
                    (10**(log_w_max_j) - 10**(log_w_min_j))
            # normalize by the (linear) width of the new bin
            snew.spectrum[i] /= (10**(log_w_max_i) - 10**(log_w_min_i))
        return snew
    def integrate(self,w_min = None, w_max = None):
        """
        note that wmin and wmax are in linear angstroms,
        not log angstroms!
        """
        if w_min == None:
            log_w_min = self.log_w_min()
        else:
            log_w_min = numpy.log10(w_min)
        if w_max == None:
            log_w_max = self.log_w_max()
        else:
            log_w_max = numpy.log10(w_max)
        if (log_w_min < self.log_w_min()-1E-10) or\
           (log_w_max > self.log_w_max()+1E-10):
            print "Error: integrate(): integration bounds exceed spectrum range"
            print "Abort"
            exit(-1)
        i_min = numpy.floor((log_w_min - self.coeff0)/self.coeff1+ 0.5)
        i_max = numpy.ceil ((log_w_max - self.coeff0)/self.coeff1+ 0.5)
        # clip the one-bin overhang allowed by the slack above
        if(i_min == -1):
            i_min += 1
        if(i_max == len(self)+1):
            i_max -= 1
        tot_flux = 0
        for i in range(int(i_min),int(i_max)):
            # overlap of bin i with the integration bounds
            log_w_min_i = max(log_w_min, self.log_w_min(i) )
            log_w_max_i = min(log_w_max, self.log_w_max(i) )
            tot_flux += self.spectrum[i] * (10**log_w_max_i - 10**log_w_min_i)
        return tot_flux
    def normalize(self,N=1):
        # Scale the spectrum so that its integral equals N.
        # NOTE(review): integrate() is computed three times and the local I
        # is unused after the first call — could reuse I, behavior unchanged.
        I = self.integrate()
        if self.integrate()<=0:
            raise ValueError, "error: cannot normalize. I=0"
        self.spectrum /= self.integrate()
        self.spectrum *= N
if __name__ == '__main__':
    # Demo: load a sample SDSS spectrum, plot it, then rebin it onto a
    # 1000-bin log-wavelength grid and overplot the rebinned version.
    FIT_FILE = 'spSpec-51630-0266-104.fit'
    spec = SDSSfits(FIT_FILE)
    print spec.w_min(), spec.w_max()
    spec.plot(c='#AAAAAA')
    #spec.remove_O_lines()
    #spec.move_to_restframe()
    rebin_length = 1000
    # choose the new grid so it spans (just inside) the original spectrum
    factor = (len(spec) - 1.0)/rebin_length
    rebin_coeff1 = factor * spec.coeff1
    rebin_coeff0 = spec.coeff0 + 0.5*factor*spec.coeff1
    spec_rebin = spec.rebin(rebin_coeff0,rebin_coeff1,rebin_length)
    spec_rebin.plot(c='b')
    pylab.show()
| {
"repo_name": "excelly/xpy-ml",
"path": "sdss/jake_lib/SDSSfits.py",
"copies": "1",
"size": "8209",
"license": "apache-2.0",
"hash": 7622193906198012000,
"line_mean": 31.1921568627,
"line_max": 84,
"alpha_frac": 0.4814228286,
"autogenerated": false,
"ratio": 3.3020917135961385,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.42835145421961385,
"avg_score": null,
"num_lines": null
} |
#!/astro/apps/pkg/python/bin/python
import numpy
import pylab
from matplotlib import ticker#, axes3d
import pyfits
import os
def S(theta):
    """Map theta in [0, 1] to a point (x, y) on a 2-D "S"-shaped curve.

    theta may be a scalar or an ndarray; the result has the same shape.
    """
    angle = 3 * numpy.pi * (theta - 0.5)
    return numpy.sin(angle), numpy.sign(angle) * (numpy.cos(angle) - 1)
def rand_on_S(N, sig=0, hole=False, outliers=0):
    """Draw N samples on the 3-D "S" sheet (the S curve extruded in y).

    Parameters
    ----------
    N : int
        Number of points to draw.
    sig : float
        If non-zero, standard deviation of iid Gaussian noise added to
        every coordinate.
    hole : bool
        If True, cut a rectangular hole out of the sheet
        (0.3 <= t <= 0.7 and 1.0 <= y <= 4.0) and return only the
        surviving points.
    outliers : int
        Number of leading points to overwrite with uniform background
        noise (their t is reset to 0).

    Returns
    -------
    x, y, z, t : ndarrays
        Coordinates and the underlying curve parameter.
    """
    # NOTE: the order of the RNG draws below is part of the function's
    # observable behavior under a fixed seed — do not reorder.
    t = numpy.random.random(N)
    x, z = S(t)
    y = numpy.random.random(N) * 5.0
    if sig:
        x = x + numpy.random.normal(scale=sig, size=N)
        y = y + numpy.random.normal(scale=sig, size=N)
        z = z + numpy.random.normal(scale=sig, size=N)
    if outliers:
        x[:outliers] = -1.2 + 2.4 * numpy.random.random(outliers)
        y[:outliers] = -0.2 + 5.4 * numpy.random.random(outliers)
        z[:outliers] = -2.2 + 4.4 * numpy.random.random(outliers)
        t[:outliers] = 0
    if not hole:
        return x, y, z, t
    keep = numpy.where(((0.3 > t) | (0.7 < t)) | ((1.0 > y) | (4.0 < y)))
    return x[keep], y[keep], z[keep], t[keep]
def scatter_2D(x,y,t=None,cmap=pylab.cm.jet,clim=None,
               xlabel=None,ylabel=None,title=""):
    """Scatter-plot x vs y, optionally colored by t.

    t     -- per-point color values (None for uniform color)
    clim  -- (lo, hi) color limits, or None
    Returns the created pylab figure.
    """
    fig = pylab.figure()
    # BUG FIX: use identity checks -- `t == None` on a numpy array is an
    # elementwise comparison and cannot be used in a boolean context
    if t is None:
        cb = pylab.scatter(x, y, s=12.0, linewidths=0)
    else:
        cb = pylab.scatter(x, y, c=t, cmap=cmap, s=12.0, linewidths=0)
    if xlabel is None:
        xlabel = 'x'
    if ylabel is None:
        ylabel = 'y'
    pylab.xlabel(xlabel)
    pylab.ylabel(ylabel)
    pylab.title(title)
    pylab.colorbar(cb)
    if clim is not None:
        pylab.clim(clim[0], clim[1])
    return fig
def scatter_3D(x,y,z,t=None,cmap=pylab.cm.jet,clim=None,
               xlabel=None,ylabel=None,zlabel=None,title=None):
    """3-D scatter plot of (x, y, z), optionally colored by t.

    NOTE(review): the `axes3d` import is commented out at the top of this
    file ("from matplotlib import ticker#, axes3d"), so this function
    raises NameError as written -- re-enable that import before use.
    """
    fig = pylab.figure()
    ax = axes3d.Axes3D(fig)
    # BUG FIX: identity checks -- `t == None` is elementwise on numpy
    # arrays and cannot be used as a truth value
    if t is None:
        cb = ax.scatter3D(x, y, z, s=12.0, linewidths=0)
    else:
        cb = ax.scatter3D(x, y, z, c=t, cmap=cmap, s=12.0, linewidths=0)
    #if x.min()>-2 and x.max()<2:
    #    ax.set_xlim(-2,2)
    if xlabel is None:
        xlabel = 'x'
    if ylabel is None:
        ylabel = 'y'
    if zlabel is None:
        zlabel = 'z'
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.set_zlabel(zlabel)
    ax.set_title(title)
    # elev, az
    ax.view_init(10, -80)
    cb = pylab.colorbar(cb)
    if clim is not None:
        cb.set_clim(clim[0], clim[1])
    return fig
def WriteMatrixFile(M, filename, description=""):
    """Write a D x N matrix M to a whitespace-delimited text file.

    Format: N_POINTS / DIMENSION header lines, a 'DATA:' marker,
    '#'-prefixed description lines, then one line per point (each line
    holds the D values of column j of M).
    """
    D, N = M.shape
    # `with` guarantees the file is closed even if a write fails
    with open(filename, 'w') as of:
        of.write("N_POINTS:\t%i\n" % N)
        of.write("DIMENSION:\t%i\n" % D)
        of.write("DATA:\n")
        for s in description.split('\n'):
            of.write('# %s\n' % s)
        for j in range(N):
            for i in range(D):
                try:
                    of.write("%g\t" % M[i,j])
                except (TypeError, ValueError):
                    # non-numeric entry: fall back to plain str formatting
                    of.write("%s\t" % M[i,j])
            of.write("\n")
def WriteVectorFile(V, filename, description=""):
    """Write a length-N vector V to a text file, one value per line.

    Same header layout as WriteMatrixFile but without a DIMENSION line.
    """
    N = len(V)
    # `with` guarantees the file is closed even if a write fails
    with open(filename, 'w') as of:
        of.write("N_POINTS:\t%i\n" % N)
        of.write("DATA:\n")
        for s in description.split('\n'):
            of.write('# %s\n' % s)
        for i in range(N):
            of.write("%g\n" % V[i])
def ReadMatrixFile(filename):
    """Read a D x N matrix written in the WriteMatrixFile format.

    Non-float entries are replaced by the sentinel -888 and the offending
    column indices are reported once at the end.
    Raises ValueError for malformed files or too few data lines.
    """
    nonfloats = []
    N = None
    D = None
    M = None
    i = 0  # number of data lines consumed so far
    # `with` guarantees the handle is closed (the original leaked it)
    with open(filename, 'r') as F:
        for line in F:
            if line.strip().startswith("#"):
                continue
            elif line.startswith("N_POINTS:"):
                N = int(line.split()[1])
            elif line.startswith("DIMENSION:"):
                D = int(line.split()[1])
            elif line.startswith("DATA:"):
                # both header values must precede the DATA marker
                if (N is None) or (D is None):
                    raise ValueError("Invalid file format: %s" % filename)
                M = numpy.zeros([D, N])
                i = 0
            elif M is None:
                # data encountered before the DATA marker
                raise ValueError("Invalid file format: %s" % filename)
            else:
                fields = line.split()
                for j in range(len(fields)):
                    try:
                        M[j, i] = float(fields[j])
                    except ValueError:
                        if str(j) not in nonfloats:
                            nonfloats.append(str(j))
                        M[j, i] = -888
                i += 1
    if len(nonfloats) != 0:
        print("%s: non-float values found in columns %s" %
              (filename, ', '.join(nonfloats)))
    if i != N:
        raise ValueError("Not enough data points in %s" % filename)
    return M
def ReadVectorFile(filename):
    """Read a length-N vector written in the WriteVectorFile format.

    Warns (without failing) if a DIMENSION keyword suggests the file is
    actually a matrix file. Raises ValueError on malformed files.
    """
    N = None
    V = None
    i = 0  # number of data lines consumed so far
    # `with` guarantees the handle is closed (the original leaked it)
    with open(filename, 'r') as F:
        for line in F:
            if line.startswith("#"):
                continue
            elif line.startswith("N_POINTS:"):
                N = int(line.split()[1])
            elif line.startswith("DIMENSION:"):
                print("ReadVectorFile: Warning: keyword DIMENSION found")
                print("  %s is a matrix file" % filename)
            elif line.startswith("DATA:"):
                if N is None:
                    raise ValueError("Invalid file format: %s" % filename)
                V = numpy.zeros(N)
                i = 0
            elif V is None:
                raise ValueError("Invalid file format: %s" % filename)
            else:
                V[i] = float(line.split()[0])
                i += 1
    if i != N:
        raise ValueError("Not enough data points in %s" % filename)
    return V
def getMatrixTags(filename):
    """Return the column tags of a matrix file, or None.

    A tag line is a '#' comment appearing after the DIMENSION keyword
    whose token count equals the stated dimension. Scanning stops at the
    first comment after DIMENSION, or at the DATA marker.
    """
    N = None
    # `with` guarantees the handle is closed (the original leaked it)
    with open(filename) as f:
        for line in f:
            if line.startswith('DIMENSION'):
                N = int(line.split()[1])
            if line.startswith('#') and N is not None:
                tags = line.lstrip('#').split()
                if len(tags) == N:
                    return tags
                else:
                    return None
            if line.startswith('DATA'):
                return None
    return None
def getFITSMatrix(filename, tag=None):
    """
    from a fits file <filename> get the matrix in HDU specified by tag
    if tag is an integer or string representation of an integer,
    it is assumed to be the index of the HDU (starting from 1)
    if tag is a string, then routine checks for an HDU with
    header['EXTNAME'] == <tag>
    """
    hdulist = pyfits.open(filename)
    if tag in [None, ""]:
        HDUnum = 0
    elif isinstance(tag, int):
        HDUnum = tag - 1
    elif isinstance(tag, str):
        if tag.isdigit():
            HDUnum = int(tag) - 1
        else:
            HDUnum = -1
            for i in range(len(hdulist)):
                try:
                    extname = hdulist[i].header['EXTNAME']
                except KeyError:
                    # this HDU has no EXTNAME card; keep looking
                    continue
                if extname.upper() == tag.upper():
                    HDUnum = i
                    break
            if HDUnum == -1:
                raise ValueError("%s not found in %s" % (tag, filename))
    else:
        # the original fell through to a NameError here; fail explicitly
        raise ValueError("invalid tag type: %r" % (tag,))
    return hdulist[HDUnum].data
def getFITSLambdas(filename):
    """Return the wavelength grid encoded by the primary HDU's
    COEFF0/COEFF1 log-linear coefficients (length NAXIS1)."""
    header = pyfits.open(filename)[0].header
    log_start = header['COEFF0']
    log_step = header['COEFF1']
    npix = header['NAXIS1']
    return 10 ** (log_start + log_step * numpy.arange(npix))
def getFITSInfo(filename, infotag="", infohdu=1):
    """Return column `infotag` from the table in HDU `infohdu`."""
    table = pyfits.open(filename)[infohdu]
    return table.data.field(infotag)
def create_fits(filename, spectra, **kwargs):
    """Write `spectra` plus per-point metadata to a two-HDU FITS file.

    kwargs are in the form LABEL=value: length-1 values become header
    cards on the primary HDU; length-N values (N = number of spectra)
    become columns of a binary table in HDU 1.

    Raises ValueError for unrecognized column dtypes or for kwargs whose
    length is neither 1 nor N.
    """
    hdu = pyfits.PrimaryHDU()
    hdu.data = numpy.asarray(spectra)
    N = spectra.shape[0]
    Collist = []
    # iterate keys alphabetically (works on both py2 and py3, unlike
    # the original list.sort() on dict.keys())
    for key in sorted(kwargs):
        try:
            L = len(kwargs[key])
        except TypeError:
            L = 1  # scalar value
        if L == 1:
            hdu.header.update(key.upper(), kwargs[key])
        elif L == N:
            # map the numpy dtype onto a FITS column format code
            fmt = numpy.asarray(kwargs[key]).dtype
            if fmt in ('int32', 'int64'):
                fmt = 'J'
            elif fmt in ('float32', 'float64'):
                fmt = 'E'
            elif str(fmt).startswith('|S'):
                fmt = 'A' + str(fmt)[2:]
            else:
                raise ValueError("unrecognized format: %s" % fmt)
            Collist.append(pyfits.Column(name=key,
                                         format=fmt,
                                         array=kwargs[key]))
        else:
            # BUG FIX: the original never applied `% key`, so the message
            # contained a literal '%s'
            raise ValueError(
                "create_fits: key %s size does not match number of points"
                % key)
    tbdhdu = pyfits.new_table(Collist)
    hdulist = pyfits.HDUList([hdu, tbdhdu])
    # clobber means overwrite existing file
    hdulist.writeto(filename, clobber=True)
def parse_FITS_filename(filename):
    """
    if filename is of the form '/path/to/myfile.fits+1'
    returns ('/path/to/myfile.fits','1')
    if filename is of the form '/path/to/myfile.fits[LABEL]'
    returns ('/path/to/myfile.fits','LABEL')
    raises ValueError if filename cannot be parsed
    """
    # BUG FIX: str.rstrip() returns a new string; the original discarded
    # the result, so trailing whitespace was never removed
    filename = filename.rstrip()
    if filename.endswith(']'):
        parts = filename[:-1].split('[')
        if len(parts) == 1:
            # a ']' with no matching '[' cannot be parsed
            raise ValueError("%s cannot be parsed" % filename)
        return '['.join(parts[:-1]), parts[-1]
    if '+' in filename:
        parts = filename.split('+')
        if parts[-1].isdigit():
            return '+'.join(parts[:-1]), parts[-1]
        # '+' present but trailing part is not an HDU number
        return filename, ""
    return filename, ""
def check_if_it_works():
    """Smoke-test create_fits: write tmp.fits and read its columns back.

    NOTE(review): `LINES` is not defined in this module, so this function
    raises NameError as written -- it appears to expect a module-level
    list of spectral-line names; confirm against the intended caller.
    Also passes a plain list where create_fits reads spectra.shape.
    """
    D = {}
    D['target'] = ['Target1','Target2']
    D['Z'] = [0.1,0.2]
    D['DZ'] = [0.005]
    for LINE in LINES:
        for info in ('flux','dflux','nsigma'):
            D[LINE+'_'+info] = [0.1,0.2]
    spectrum = [[1,2,3,4],[5,6,7,8]]
    D["COEFF0"] = 3.7
    D["COEFF1"] = 0.0001
    create_fits('tmp.fits',spectrum,**D)
    #check on the data
    hdulist = pyfits.open('tmp.fits')
    print hdulist[1].data.field('target')
    print hdulist[1].data.field('Z')
    print hdulist[1].data.field('DZ')
    hdulist.close()
if __name__ == '__main__':
    # build a test data set: N points on the S curve written to a FITS
    # file, with o optional outliers overwriting the first o points
    #filename = "/local/tmp/sdss_spec_2/1500.dat"
    #print filename
    #print parse_FITS_filename(filename)
    #exit()
    N=2000
    o=0
    if o:
        filename = 'test_data/S%io%i.fits' % (N,o)
    else:
        filename = 'test_data/S%i.fits' % (N)
    print filename
    x,y,z,t = rand_on_S(N,sig=0,hole=False,outliers=0)
    # overwrite the first o points with uniform junk (no-op when o == 0)
    x[:o] = 2.4 * numpy.random.random(o)-1.2
    y[:o] = 5 * numpy.random.random(o)
    z[:o] = 4 * numpy.random.random(o)-2
    t[:o] = 0
    M = numpy.array([x,y,z]).T
    #WriteMatrixFile(M,'S%io%i.dat' % (N,o))
    #WriteMatrixFile(M1,'S%io%i-t.dat' % (N,o))
    create_fits(filename,M,**{'t':t,'coeff0':1})
| {
"repo_name": "excelly/xpy-ml",
"path": "sdss/jake_lib/tools.py",
"copies": "1",
"size": "10932",
"license": "apache-2.0",
"hash": 7038385707113477000,
"line_mean": 24.7830188679,
"line_max": 88,
"alpha_frac": 0.5075009147,
"autogenerated": false,
"ratio": 3.275014979029359,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9167269049336897,
"avg_score": 0.023049368878492356,
"num_lines": 424
} |
#!/astro/apps/pkg/python/bin/python
import pyfits
import SDSSfits
import numpy
from tools import create_fits
import os
def main(OUT_DIR = "/astro/net/scratch1/vanderplas/SDSS_GAL_RESTFRAME/",
         DIR_ROOT = "/astro/net/scratch1/sdssspec/spectro/1d_26/*/1d",
         LINES_FILE = "LINES_SHORT.TXT",
         z_min = 0.0, #zmax is set such that SII lines will
         z_max = 0.36, # fall in range of 3830 to 9200 angstroms
         rebin_coeff0 = 3.583, # rebin parameters give a wavelength
         rebin_coeff1 = 0.0002464, # range from 3830A to 9200A
         rebin_length = 1000,
         remove_sky_absorption = True,
         normalize = True):
    """Condense raw SDSS spectra into one FITS table per plate directory.

    Walks DIR_ROOT's numbered subdirectories, filters each .fit spectrum
    by redshift / spectral class / line count, optionally removes oxygen
    sky lines and normalizes, rebins onto a common wavelength grid, and
    writes the surviving spectra plus metadata columns to
    OUT_DIR/<SET>.dat via create_fits.
    """
    LINES = []
    KEYS = ['TARGET','Z','Z_ERR','SPEC_CLN','MAG_G','MAG_R','MAG_I','N_BAD_PIX']
    if LINES_FILE is not None:
        # each usable line in LINES_FILE contributes five metadata columns
        for line in open(LINES_FILE):
            line = line.split()
            if len(line)==0:continue
            W = float(line[0])
            if W<3000 or W>7000:continue
            LINES.append('%.2f'%W)
            for info in ('flux','dflux','width','dwidth','nsigma'):
                KEYS.append('%.2f_%s' % (W,info) )
    for SET in os.listdir(DIR_ROOT.split('*')[0]):
        if not SET.isdigit():
            continue
        DIR = DIR_ROOT.replace('*',SET)
        if not os.path.exists(DIR):
            continue
        OUT_FILE = os.path.join(OUT_DIR,SET+'.dat')
        print('writing %s' % OUT_FILE)
        col_dict = dict([(KEY,[]) for KEY in KEYS])
        spec_list = []
        NUMS = []
        for F in os.listdir(DIR):
            if not F.endswith('.fit'): continue
            num = int( F.strip('.fit').split('-')[-1] )
            if num in NUMS:
                # already measured: skip duplicates
                continue
            #open hdu file and glean necessary info
            SPEC = SDSSfits.SDSSfits(os.path.join(DIR,F),LINES)
            if SPEC.D['SPEC_CLN'] not in (1,2,3,4):
                continue
            if SPEC.z<z_min:
                # negative z: skip
                continue
            if SPEC.z>z_max:
                # z>z_max: skip
                continue
            if SPEC.numlines == 0:
                # no line measurements: skip
                continue
            if remove_sky_absorption:
                #cover up strong oxygen absorption
                SPEC.remove_O_lines()
            #move to restframe, rebin, and normalize
            SPEC.move_to_restframe()
            try:
                SPEC = SPEC.rebin(rebin_coeff0,rebin_coeff1,rebin_length)
            except Exception:
                print("  rebin failed. Skipping %s" % F)
                continue
            if normalize:
                try:
                    SPEC.normalize()
                except Exception:
                    print("  normalize failed. Skipping %s" % F)
                    continue
            if min(SPEC.spectrum) < -4*max(SPEC.spectrum):
                print("  goes too far negative. Skipping %s" % F)
                # BUG FIX: the original printed "Skipping" but fell
                # through and kept the spectrum anyway
                continue
            NUMS.append(num)
            spec_list.append(SPEC.spectrum.tolist())
            for KEY in KEYS:
                col_dict[KEY].append(SPEC.D[KEY])
            del SPEC
        if os.path.exists(OUT_FILE):
            # portable delete instead of shelling out to `rm`
            os.remove(OUT_FILE)
        col_dict['coeff0'] = rebin_coeff0
        col_dict['coeff1'] = rebin_coeff1
        create_fits(OUT_FILE,numpy.asarray( spec_list ),**col_dict)
        print(" - wrote %i spectra" % len(NUMS))
if __name__ == '__main__':
    # run with the default paths but with line measurements, sky-line
    # removal and normalization all disabled
    main(OUT_DIR = "/astro/net/scratch1/vanderplas/SDSS_GAL_RESTFRAME/",
         DIR_ROOT = "/astro/net/scratch1/sdssspec/spectro/1d_26/*/1d",
         #LINES_FILE = "LINES_SHORT.TXT",
         LINES_FILE = None,
         z_min = 0.0, #zmax is set such that SII lines will
         z_max = 0.36, # fall in range of 3830 to 9200 angstroms
         rebin_coeff0 = 3.583, # rebin parameters give a wavelength
         rebin_coeff1 = 0.0002464, # range from 3830A to 9200A
         rebin_length = 1000,
         remove_sky_absorption = False,
         normalize = False)
| {
"repo_name": "excelly/xpy-ml",
"path": "sdss/jake_lib/make_condensed_fits.py",
"copies": "1",
"size": "4199",
"license": "apache-2.0",
"hash": -4041813316859034000,
"line_mean": 31.8046875,
"line_max": 80,
"alpha_frac": 0.5103596094,
"autogenerated": false,
"ratio": 3.408279220779221,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4418638830179221,
"avg_score": null,
"num_lines": null
} |
# A strobogrammatic number is a number that looks the same when rotated 180 degrees (looked at upside down).
#
# Find all strobogrammatic numbers that are of length = n.
#
# For example,
# Given n = 2, return ["11","69","88","96"].
#
# Hint:
#
# Try to use recursion and notice that it should recurse with n - 2 instead of n - 1.
class Solution(object):
    """Generate all strobogrammatic numbers (same when rotated 180
    degrees) of a given length."""

    def findStrobogrammatic(self, n):
        """
        :type n: int
        :rtype: List[str]
        """
        return self.recurse(n, n)

    def recurse(self, n, length):
        """Build strobogrammatic strings of length n.

        Recurses with n - 2 because two characters (one on each end) are
        added per level. A leading '0' is allowed only for inner
        substrings, i.e. when n != length.
        """
        if n == 0:
            return ['']
        elif n == 1:
            return ['0', '1', '8']
        else:
            result = []
            middles = self.recurse(n - 2, length)
            for middle in middles:
                result.append("8" + middle + "8")
                result.append("1" + middle + "1")
                result.append("9" + middle + "6")
                result.append("6" + middle + "9")
                if n != length:
                    # '0' may not appear in the outermost position
                    result.append("0" + middle + "0")
            return result

    def iterative(self, n):
        """Iterative equivalent of recurse(n, n), growing outward from
        the middle two characters at a time."""
        if n == 0:
            return ['']
        elif n == 1:
            return ['0', '1', '8']
        else:
            if n % 2 == 1:
                result = ['0', '1', '8']
                # BUG FIX: `range` instead of py2-only `xrange`, so the
                # method also works on Python 3 (behavior is identical)
                r = range(1, n, 2)
            else:
                result = ['']
                r = range(0, n, 2)
            for i in r:
                j = len(result)
                while j > 0:
                    middle = result.pop(0)
                    result.append("8" + middle + "8")
                    result.append("1" + middle + "1")
                    result.append("9" + middle + "6")
                    result.append("6" + middle + "9")
                    if i != n - 2:
                        # '0' allowed only while not at the outer layer
                        result.append("0" + middle + "0")
                    j -= 1
            return result
# Note:
# Recursing with n-2 as we are going to add 2 numbers every time.
| {
"repo_name": "jigarkb/Programming",
"path": "LeetCode/247-M-StrobogrammaticNumberII.py",
"copies": "2",
"size": "1965",
"license": "mit",
"hash": 9219716216263987000,
"line_mean": 28.7727272727,
"line_max": 108,
"alpha_frac": 0.4254452926,
"autogenerated": false,
"ratio": 3.8529411764705883,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0003151851348992377,
"num_lines": 66
} |
# Astrocrash01
# Get asteroids moving on the screen
import random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
def main():
    """ Set up the background and asteroids, then run the game loop. """
    # establish background
    nebula_image = games.load_image("nebula.jpg")
    games.screen.background = nebula_image

    # create 8 asteroids
    for i in range(8):
        x = random.randrange(games.screen.width)
        y = random.randrange(games.screen.height)
        size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])
        new_asteroid = Asteroid(x = x, y = y, size = size)
        games.screen.add(new_asteroid)

    games.screen.mainloop()

# kick it off!
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash01.py",
"copies": "1",
"size": "1753",
"license": "mit",
"hash": -9193152681968067000,
"line_mean": 26.8253968254,
"line_max": 81,
"alpha_frac": 0.5750142613,
"autogenerated": false,
"ratio": 3.390715667311412,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44657299286114116,
"avg_score": null,
"num_lines": null
} |
# Astrocrash02
# Get asteroids moving on the screen
import random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
class Ship(games.Sprite):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    ROTATION_STEP = 3  # rotation increment applied per frame a key is held

    def update(self):
        """ Rotate based on keys pressed. """
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP
def main():
    """ Set up the background, asteroids and ship, then run the loop. """
    # establish background
    nebula_image = games.load_image("nebula.jpg")
    games.screen.background = nebula_image

    # create 8 asteroids
    for i in range(8):
        x = random.randrange(games.screen.width)
        y = random.randrange(games.screen.height)
        size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])
        new_asteroid = Asteroid(x = x, y = y, size = size)
        games.screen.add(new_asteroid)

    # create the ship
    the_ship = Ship(image = Ship.image,
                    x = games.screen.width/2,
                    y = games.screen.height/2)
    games.screen.add(the_ship)

    games.screen.mainloop()

# kick it off!
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash02.py",
"copies": "1",
"size": "2334",
"license": "mit",
"hash": 6989870555527336000,
"line_mean": 27.4634146341,
"line_max": 81,
"alpha_frac": 0.5711225364,
"autogenerated": false,
"ratio": 3.3973799126637556,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.44685024490637554,
"avg_score": null,
"num_lines": null
} |
# Astrocrash03
# Get ship moving
import math, random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
class Ship(games.Sprite):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3    # rotation increment per frame (degrees)
    VELOCITY_STEP = .03  # thrust acceleration per frame

    def update(self):
        """ Rotate and thrust based on keys pressed. """
        # rotate based on left and right arrow keys
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP

        # apply thrust based on up arrow key
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()

            # change velocity components based on ship's angle
            angle = self.angle * math.pi / 180  # convert to radians
            self.dx += Ship.VELOCITY_STEP * math.sin(angle)
            self.dy += Ship.VELOCITY_STEP * -math.cos(angle)

        # wrap the ship around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
def main():
    """ Set up the background, asteroids and ship, then run the loop. """
    # establish background
    nebula_image = games.load_image("nebula.jpg")
    games.screen.background = nebula_image

    # create 8 asteroids
    for i in range(8):
        x = random.randrange(games.screen.width)
        y = random.randrange(games.screen.height)
        size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])
        new_asteroid = Asteroid(x = x, y = y, size = size)
        games.screen.add(new_asteroid)

    # create the ship
    the_ship = Ship(image = Ship.image,
                    x = games.screen.width/2,
                    y = games.screen.height/2)
    games.screen.add(the_ship)

    games.screen.mainloop()

# kick it off!
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash03.py",
"copies": "1",
"size": "3170",
"license": "mit",
"hash": -6622120055722393000,
"line_mean": 28.6261682243,
"line_max": 81,
"alpha_frac": 0.5665615142,
"autogenerated": false,
"ratio": 3.4835164835164836,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.45500779977164835,
"avg_score": null,
"num_lines": null
} |
# Astrocrash04
# Get ship firing missiles
import math, random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
class Ship(games.Sprite):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3    # rotation increment per frame (degrees)
    VELOCITY_STEP = .03  # thrust acceleration per frame

    def update(self):
        """ Rotate and thrust based on keys pressed. """
        # rotate based on left and right arrow keys
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP

        # apply thrust based on up arrow key
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()

            # change velocity components based on ship's angle
            angle = self.angle * math.pi / 180  # convert to radians
            self.dx += Ship.VELOCITY_STEP * math.sin(angle)
            self.dy += Ship.VELOCITY_STEP * -math.cos(angle)

        # wrap the ship around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width

        # fire missile if spacebar pressed
        # (one missile per frame while held -- rate-limited in the next
        # version of this teaching file)
        if games.keyboard.is_pressed(games.K_SPACE):
            new_missile = Missile(self.x, self.y, self.angle)
            games.screen.add(new_missile)
class Missile(games.Sprite):
    """ A missile launched by the player's ship. """
    image = games.load_image("missile.bmp")
    sound = games.load_sound("missile.wav")
    BUFFER = 40           # spawn distance ahead of the ship
    VELOCITY_FACTOR = 7   # missile speed
    LIFETIME = 40         # frames before the missile self-destructs

    def __init__(self, ship_x, ship_y, ship_angle):
        """ Initialize missile sprite. """
        Missile.sound.play()

        # convert to radians
        angle = ship_angle * math.pi / 180

        # calculate missile's starting position
        buffer_x = Missile.BUFFER * math.sin(angle)
        buffer_y = Missile.BUFFER * -math.cos(angle)
        x = ship_x + buffer_x
        y = ship_y + buffer_y

        # calculate missile's velocity components
        dx = Missile.VELOCITY_FACTOR * math.sin(angle)
        dy = Missile.VELOCITY_FACTOR * -math.cos(angle)

        # create the missile
        super(Missile, self).__init__(image = Missile.image,
                                      x = x, y = y,
                                      dx = dx, dy = dy)
        self.lifetime = Missile.LIFETIME

    def update(self):
        """ Move the missile. """
        # if lifetime is up, destroy the missile
        self.lifetime -= 1
        if self.lifetime == 0:
            self.destroy()

        # wrap the missile around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
def main():
    """ Set up the background, asteroids and ship, then run the loop. """
    # establish background
    nebula_image = games.load_image("nebula.jpg")
    games.screen.background = nebula_image

    # create 8 asteroids
    for i in range(8):
        x = random.randrange(games.screen.width)
        y = random.randrange(games.screen.height)
        size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])
        new_asteroid = Asteroid(x = x, y = y, size = size)
        games.screen.add(new_asteroid)

    # create the ship
    the_ship = Ship(image = Ship.image,
                    x = games.screen.width/2,
                    y = games.screen.height/2)
    games.screen.add(the_ship)

    games.screen.mainloop()

# kick it off!
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash04.py",
"copies": "1",
"size": "4958",
"license": "mit",
"hash": -8554937211707166000,
"line_mean": 29.2317073171,
"line_max": 81,
"alpha_frac": 0.5586930214,
"autogenerated": false,
"ratio": 3.5138199858256556,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9445862497209232,
"avg_score": 0.025330102003284814,
"num_lines": 164
} |
# Astrocrash05
# Limiting missile fire rate
import math, random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
class Ship(games.Sprite):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3    # rotation increment per frame (degrees)
    VELOCITY_STEP = .03  # thrust acceleration per frame
    MISSILE_DELAY = 25   # frames enforced between consecutive shots

    def __init__(self, x, y):
        """ Initialize ship sprite. """
        super(Ship, self).__init__(image = Ship.image, x = x, y = y)
        # countdown until the ship may fire again (0 = ready)
        self.missile_wait = 0

    def update(self):
        """ Rotate and thrust based on keys pressed. """
        # rotate based on left and right arrow keys
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP

        # apply thrust based on up arrow key
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()

            # change velocity components based on ship's angle
            angle = self.angle * math.pi / 180  # convert to radians
            self.dx += Ship.VELOCITY_STEP * math.sin(angle)
            self.dy += Ship.VELOCITY_STEP * -math.cos(angle)

        # wrap the ship around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width

        # if waiting until the ship can fire next, decrease wait
        if self.missile_wait > 0:
            self.missile_wait -= 1

        # fire missile if spacebar pressed and missile wait is over
        if games.keyboard.is_pressed(games.K_SPACE) and self.missile_wait == 0:
            new_missile = Missile(self.x, self.y, self.angle)
            games.screen.add(new_missile)
            self.missile_wait = Ship.MISSILE_DELAY
class Missile(games.Sprite):
    """ A missile launched by the player's ship. """
    image = games.load_image("missile.bmp")
    sound = games.load_sound("missile.wav")
    BUFFER = 40           # spawn distance ahead of the ship
    VELOCITY_FACTOR = 7   # missile speed
    LIFETIME = 40         # frames before the missile self-destructs

    def __init__(self, ship_x, ship_y, ship_angle):
        """ Initialize missile sprite. """
        Missile.sound.play()

        # convert to radians
        angle = ship_angle * math.pi / 180

        # calculate missile's starting position
        buffer_x = Missile.BUFFER * math.sin(angle)
        buffer_y = Missile.BUFFER * -math.cos(angle)
        x = ship_x + buffer_x
        y = ship_y + buffer_y

        # calculate missile's velocity components
        dx = Missile.VELOCITY_FACTOR * math.sin(angle)
        dy = Missile.VELOCITY_FACTOR * -math.cos(angle)

        # create the missile
        super(Missile, self).__init__(image = Missile.image,
                                      x = x, y = y,
                                      dx = dx, dy = dy)
        self.lifetime = Missile.LIFETIME

    def update(self):
        """ Move the missile. """
        # if lifetime is up, destroy the missile
        self.lifetime -= 1
        if self.lifetime == 0:
            self.destroy()

        # wrap the missile around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
def main():
    """ Set up the background, asteroids and ship, then run the loop. """
    # establish background
    nebula_image = games.load_image("nebula.jpg")
    games.screen.background = nebula_image

    # create 8 asteroids
    for i in range(8):
        x = random.randrange(games.screen.width)
        y = random.randrange(games.screen.height)
        size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])
        new_asteroid = Asteroid(x = x, y = y, size = size)
        games.screen.add(new_asteroid)

    # create the ship
    the_ship = Ship(x = games.screen.width/2, y = games.screen.height/2)
    games.screen.add(the_ship)

    games.screen.mainloop()

# kick it off!
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash05.py",
"copies": "1",
"size": "5336",
"license": "mit",
"hash": 4815450428095640000,
"line_mean": 29.8439306358,
"line_max": 81,
"alpha_frac": 0.5629685157,
"autogenerated": false,
"ratio": 3.4784876140808345,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4541456129780834,
"avg_score": null,
"num_lines": null
} |
# Astrocrash06
# Handling collisions
import math, random
from livewires import games
# create the 640x480 game window and fix the frame rate at 50 fps
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Asteroid(games.Sprite):
    """ An asteroid which floats across the screen. """
    # size codes; also used as the divisor on SPEED (bigger -> slower)
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2
    SPAWN = 2  # number of smaller asteroids spawned when one is destroyed

    def __init__(self, x, y, size):
        """ Initialize asteroid sprite. """
        # random direction on each axis; speed scaled down by the size
        super(Asteroid, self).__init__(
            image = Asteroid.images[size],
            x = x, y = y,
            dx = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size,
            dy = random.choice([1, -1]) * Asteroid.SPEED * random.random()/size)
        self.size = size

    def update(self):
        """ Wrap around screen. """
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width

    def die(self):
        """ Destroy asteroid. """
        # if asteroid isn't small, replace with two smaller asteroids
        if self.size != Asteroid.SMALL:
            for i in range(Asteroid.SPAWN):
                new_asteroid = Asteroid(x = self.x,
                                        y = self.y,
                                        size = self.size - 1)
                games.screen.add(new_asteroid)
        self.destroy()
class Ship(games.Sprite):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3    # rotation increment per frame (degrees)
    VELOCITY_STEP = .03  # thrust acceleration per frame
    MISSILE_DELAY = 25   # frames enforced between consecutive shots

    def __init__(self, x, y):
        """ Initialize ship sprite. """
        super(Ship, self).__init__(image = Ship.image, x = x, y = y)
        # countdown until the ship may fire again (0 = ready)
        self.missile_wait = 0

    def update(self):
        """ Rotate and thrust based on keys pressed. """
        # rotate based on left and right arrow keys
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP

        # apply thrust based on up arrow key
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()

            # change velocity components based on ship's angle
            angle = self.angle * math.pi / 180  # convert to radians
            self.dx += Ship.VELOCITY_STEP * math.sin(angle)
            self.dy += Ship.VELOCITY_STEP * -math.cos(angle)

        # wrap the ship around screen
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width

        # if waiting until the ship can fire next, decrease wait
        if self.missile_wait > 0:
            self.missile_wait -= 1

        # fire missile if spacebar pressed and missile wait is over
        if games.keyboard.is_pressed(games.K_SPACE) and self.missile_wait == 0:
            new_missile = Missile(self.x, self.y, self.angle)
            games.screen.add(new_missile)
            self.missile_wait = Ship.MISSILE_DELAY

        # check if ship overlaps any other object
        # (collisions kill both the ship and whatever it hit)
        if self.overlapping_sprites:
            for sprite in self.overlapping_sprites:
                sprite.die()
            self.die()

    def die(self):
        """ Destroy ship. """
        self.destroy()
class Missile(games.Sprite):
    """ A missile launched by the player's ship. """
    image = games.load_image("missile.bmp")
    sound = games.load_sound("missile.wav")
    BUFFER = 40
    VELOCITY_FACTOR = 7
    LIFETIME = 40
    def __init__(self, ship_x, ship_y, ship_angle):
        """ Initialize missile just ahead of the ship, moving along its heading. """
        Missile.sound.play()
        rads = ship_angle * math.pi / 180   # degrees -> radians
        # unit direction vector for the ship's heading
        heading_x = math.sin(rads)
        heading_y = -math.cos(rads)
        super(Missile, self).__init__(
            image = Missile.image,
            x = ship_x + Missile.BUFFER * heading_x,
            y = ship_y + Missile.BUFFER * heading_y,
            dx = Missile.VELOCITY_FACTOR * heading_x,
            dy = Missile.VELOCITY_FACTOR * heading_y)
        self.lifetime = Missile.LIFETIME
    def update(self):
        """ Move the missile; expire, wrap and collide. """
        # if lifetime is up, destroy the missile
        self.lifetime -= 1
        if self.lifetime == 0:
            self.destroy()
        # wrap the missile around the screen edges
        if self.top > games.screen.height:
            self.bottom = 0
        if self.bottom < 0:
            self.top = games.screen.height
        if self.left > games.screen.width:
            self.right = 0
        if self.right < 0:
            self.left = games.screen.width
        # destroy everything the missile touches (and the missile itself)
        if self.overlapping_sprites:
            for sprite in self.overlapping_sprites:
                sprite.die()
            self.die()
    def die(self):
        """ Destroy the missile. """
        self.destroy()
def main():
    """ Set up the play field and enter the game loop. """
    # establish background
    games.screen.background = games.load_image("nebula.jpg")
    # scatter 8 asteroids of random size across the screen
    for _ in range(8):
        games.screen.add(Asteroid(
            x = random.randrange(games.screen.width),
            y = random.randrange(games.screen.height),
            size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])))
    # place the player's ship at the center of the screen
    games.screen.add(Ship(x = games.screen.width/2, y = games.screen.height/2))
    games.screen.mainloop()
# kick it off: build the play field and run the main loop
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash06.py",
"copies": "1",
"size": "6341",
"license": "mit",
"hash": 8866004333180221000,
"line_mean": 30.0833333333,
"line_max": 81,
"alpha_frac": 0.5505440782,
"autogenerated": false,
"ratio": 3.6048891415577033,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4655433219757703,
"avg_score": null,
"num_lines": null
} |
# Astrocrash07
# Add explosions
import math, random
from livewires import games
# open the 640x480 game window, updating at 50 frames per second
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Wrapper(games.Sprite):
    """ A sprite that wraps around the screen. """
    def update(self):
        """ Teleport the sprite to the opposite edge when it leaves the screen. """
        screen_h = games.screen.height
        screen_w = games.screen.width
        if self.top > screen_h:
            self.bottom = 0
        if self.bottom < 0:
            self.top = screen_h
        if self.left > screen_w:
            self.right = 0
        if self.right < 0:
            self.left = screen_w
    def die(self):
        """ Destroy self. """
        self.destroy()
class Collider(Wrapper):
    """ A Wrapper that can collide with another object. """
    def update(self):
        """ Wrap, then destroy everything this sprite overlaps (and itself). """
        super(Collider, self).update()
        overlaps = self.overlapping_sprites
        if overlaps:
            for other in overlaps:
                other.die()
            self.die()
    def die(self):
        """ Replace self with an explosion. """
        games.screen.add(Explosion(x = self.x, y = self.y))
        self.destroy()
class Asteroid(Wrapper):
    """ An asteroid which floats across the screen. """
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2
    SPAWN = 2
    def __init__(self, x, y, size):
        """ Initialize asteroid sprite; smaller asteroids drift faster. """
        def drift():
            # random direction, random speed scaled down by size
            return random.choice([1, -1]) * Asteroid.SPEED * random.random()/size
        super(Asteroid, self).__init__(image = Asteroid.images[size],
                                       x = x, y = y,
                                       dx = drift(), dy = drift())
        self.size = size
    def die(self):
        """ Shatter into smaller asteroids unless already small, then destroy. """
        if self.size != Asteroid.SMALL:
            for _ in range(Asteroid.SPAWN):
                games.screen.add(Asteroid(x = self.x,
                                          y = self.y,
                                          size = self.size - 1))
        super(Asteroid, self).die()
class Ship(Collider):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3
    VELOCITY_STEP = .03
    MISSILE_DELAY = 25
    def __init__(self, x, y):
        """ Initialize ship sprite with no pending missile delay. """
        super(Ship, self).__init__(image = Ship.image, x = x, y = y)
        self.missile_wait = 0
    def update(self):
        """ Rotate, thrust and fire missiles based on keys pressed. """
        super(Ship, self).update()
        # left/right arrows rotate the ship
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP
        # up arrow accelerates along the current heading
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()
            rads = self.angle * math.pi / 180   # degrees -> radians
            self.dx += Ship.VELOCITY_STEP * math.sin(rads)
            self.dy += Ship.VELOCITY_STEP * -math.cos(rads)
        # count down the delay until the next missile may fire
        if self.missile_wait > 0:
            self.missile_wait -= 1
        # spacebar fires once the delay has elapsed
        if self.missile_wait == 0 and games.keyboard.is_pressed(games.K_SPACE):
            games.screen.add(Missile(self.x, self.y, self.angle))
            self.missile_wait = Ship.MISSILE_DELAY
class Missile(Collider):
    """ A missile launched by the player's ship. """
    image = games.load_image("missile.bmp")
    sound = games.load_sound("missile.wav")
    BUFFER = 40
    VELOCITY_FACTOR = 7
    LIFETIME = 40
    def __init__(self, ship_x, ship_y, ship_angle):
        """ Initialize missile just ahead of the ship, moving along its heading. """
        Missile.sound.play()
        rads = ship_angle * math.pi / 180   # degrees -> radians
        # unit direction vector for the ship's heading
        heading_x = math.sin(rads)
        heading_y = -math.cos(rads)
        super(Missile, self).__init__(
            image = Missile.image,
            x = ship_x + Missile.BUFFER * heading_x,
            y = ship_y + Missile.BUFFER * heading_y,
            dx = Missile.VELOCITY_FACTOR * heading_x,
            dy = Missile.VELOCITY_FACTOR * heading_y)
        self.lifetime = Missile.LIFETIME
    def update(self):
        """ Wrap and collide, then expire once the lifetime runs out. """
        super(Missile, self).update()
        self.lifetime -= 1
        if self.lifetime == 0:
            # quiet removal -- no explosion when the missile simply times out
            self.destroy()
class Explosion(games.Animation):
    """ Explosion animation. """
    sound = games.load_sound("explosion.wav")
    # nine animation frames, played in order
    images = ["explosion" + str(i) + ".bmp" for i in range(1, 10)]
    def __init__(self, x, y):
        """ Play the explosion sound and run the animation once at (x, y). """
        super(Explosion, self).__init__(images = Explosion.images,
                                        x = x, y = y,
                                        repeat_interval = 4, n_repeats = 1,
                                        is_collideable = False)
        Explosion.sound.play()
def main():
    """ Set up the play field and enter the game loop. """
    # establish background
    games.screen.background = games.load_image("nebula.jpg")
    # scatter 8 asteroids of random size across the screen
    for _ in range(8):
        games.screen.add(Asteroid(
            x = random.randrange(games.screen.width),
            y = random.randrange(games.screen.height),
            size = random.choice([Asteroid.SMALL, Asteroid.MEDIUM, Asteroid.LARGE])))
    # place the player's ship at the center of the screen
    games.screen.add(Ship(x = games.screen.width/2, y = games.screen.height/2))
    games.screen.mainloop()
# kick it off: build the play field and run the main loop
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash07.py",
"copies": "1",
"size": "6701",
"license": "mit",
"hash": 897210329740537500,
"line_mean": 31.0622009569,
"line_max": 81,
"alpha_frac": 0.5420086554,
"autogenerated": false,
"ratio": 3.6241211465657113,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.46661298019657116,
"avg_score": null,
"num_lines": null
} |
# Astrocrash08
# Add Game object for complete program
import math, random
from livewires import games, color
# open the 640x480 game window, updating at 50 frames per second
games.init(screen_width = 640, screen_height = 480, fps = 50)
class Wrapper(games.Sprite):
    """ A sprite that wraps around the screen. """
    def update(self):
        """ Teleport the sprite to the opposite edge when it leaves the screen. """
        screen_h = games.screen.height
        screen_w = games.screen.width
        if self.top > screen_h:
            self.bottom = 0
        if self.bottom < 0:
            self.top = screen_h
        if self.left > screen_w:
            self.right = 0
        if self.right < 0:
            self.left = screen_w
    def die(self):
        """ Destroy self. """
        self.destroy()
class Collider(Wrapper):
    """ A Wrapper that can collide with another object. """
    def update(self):
        """ Wrap, then destroy everything this sprite overlaps (and itself). """
        super(Collider, self).update()
        overlaps = self.overlapping_sprites
        if overlaps:
            for other in overlaps:
                other.die()
            self.die()
    def die(self):
        """ Replace self with an explosion. """
        games.screen.add(Explosion(x = self.x, y = self.y))
        self.destroy()
class Asteroid(Wrapper):
    """ An asteroid which floats across the screen. """
    SMALL = 1
    MEDIUM = 2
    LARGE = 3
    images = {SMALL  : games.load_image("asteroid_small.bmp"),
              MEDIUM : games.load_image("asteroid_med.bmp"),
              LARGE  : games.load_image("asteroid_big.bmp") }
    SPEED = 2
    SPAWN = 2
    POINTS = 30
    # number of asteroids currently alive, across the whole game
    total = 0
    def __init__(self, game, x, y, size):
        """ Initialize asteroid sprite and count it toward the live total. """
        Asteroid.total += 1
        def drift():
            # random direction, random speed scaled down by size
            return random.choice([1, -1]) * Asteroid.SPEED * random.random()/size
        super(Asteroid, self).__init__(image = Asteroid.images[size],
                                       x = x, y = y,
                                       dx = drift(), dy = drift())
        self.game = game
        self.size = size
    def die(self):
        """ Score the kill, shatter if big, advance level once field is clear. """
        Asteroid.total -= 1
        # smaller asteroids are worth more points
        self.game.score.value += int(Asteroid.POINTS / self.size)
        self.game.score.right = games.screen.width - 10
        if self.size != Asteroid.SMALL:
            for _ in range(Asteroid.SPAWN):
                games.screen.add(Asteroid(game = self.game,
                                          x = self.x,
                                          y = self.y,
                                          size = self.size - 1))
        # if all asteroids are gone, advance to next level
        if Asteroid.total == 0:
            self.game.advance()
        super(Asteroid, self).die()
class Ship(Collider):
    """ The player's ship. """
    image = games.load_image("ship.bmp")
    sound = games.load_sound("thrust.wav")
    ROTATION_STEP = 3
    VELOCITY_STEP = .03
    VELOCITY_MAX = 3
    MISSILE_DELAY = 25
    def __init__(self, game, x, y):
        """ Initialize ship sprite for the given game. """
        super(Ship, self).__init__(image = Ship.image, x = x, y = y)
        self.game = game
        self.missile_wait = 0
    def update(self):
        """ Rotate, thrust and fire missiles based on keys pressed. """
        super(Ship, self).update()
        # left/right arrows rotate the ship
        if games.keyboard.is_pressed(games.K_LEFT):
            self.angle -= Ship.ROTATION_STEP
        if games.keyboard.is_pressed(games.K_RIGHT):
            self.angle += Ship.ROTATION_STEP
        # up arrow accelerates along the current heading
        if games.keyboard.is_pressed(games.K_UP):
            Ship.sound.play()
            rads = self.angle * math.pi / 180   # degrees -> radians
            self.dx += Ship.VELOCITY_STEP * math.sin(rads)
            self.dy += Ship.VELOCITY_STEP * -math.cos(rads)
            # cap velocity in each direction
            self.dx = min(max(self.dx, -Ship.VELOCITY_MAX), Ship.VELOCITY_MAX)
            self.dy = min(max(self.dy, -Ship.VELOCITY_MAX), Ship.VELOCITY_MAX)
        # count down the delay until the next missile may fire
        if self.missile_wait > 0:
            self.missile_wait -= 1
        # spacebar fires once the delay has elapsed
        if self.missile_wait == 0 and games.keyboard.is_pressed(games.K_SPACE):
            games.screen.add(Missile(self.x, self.y, self.angle))
            self.missile_wait = Ship.MISSILE_DELAY
    def die(self):
        """ End the game, then destroy the ship (with explosion). """
        self.game.end()
        super(Ship, self).die()
class Missile(Collider):
    """ A missile launched by the player's ship. """
    image = games.load_image("missile.bmp")
    sound = games.load_sound("missile.wav")
    BUFFER = 40
    VELOCITY_FACTOR = 7
    LIFETIME = 40
    def __init__(self, ship_x, ship_y, ship_angle):
        """ Initialize missile just ahead of the ship, moving along its heading. """
        Missile.sound.play()
        rads = ship_angle * math.pi / 180   # degrees -> radians
        # unit direction vector for the ship's heading
        heading_x = math.sin(rads)
        heading_y = -math.cos(rads)
        super(Missile, self).__init__(
            image = Missile.image,
            x = ship_x + Missile.BUFFER * heading_x,
            y = ship_y + Missile.BUFFER * heading_y,
            dx = Missile.VELOCITY_FACTOR * heading_x,
            dy = Missile.VELOCITY_FACTOR * heading_y)
        self.lifetime = Missile.LIFETIME
    def update(self):
        """ Wrap and collide, then expire once the lifetime runs out. """
        super(Missile, self).update()
        self.lifetime -= 1
        if self.lifetime == 0:
            # quiet removal -- no explosion when the missile simply times out
            self.destroy()
class Explosion(games.Animation):
    """ Explosion animation. """
    sound = games.load_sound("explosion.wav")
    # nine animation frames, played in order
    images = ["explosion" + str(i) + ".bmp" for i in range(1, 10)]
    def __init__(self, x, y):
        """ Play the explosion sound and run the animation once at (x, y). """
        super(Explosion, self).__init__(images = Explosion.images,
                                        x = x, y = y,
                                        repeat_interval = 4, n_repeats = 1,
                                        is_collideable = False)
        Explosion.sound.play()
class Game(object):
    """ The game itself: owns the score, the player's ship and level progression. """
    def __init__(self):
        """ Initialize Game object: score display, ship, level counter. """
        # current level number; advance() increments before use, so play starts at 1
        self.level = 0
        # load sound for level advance
        self.sound = games.load_sound("level.wav")
        # create score display, pinned to the top-right corner
        self.score = games.Text(value = 0,
                                size = 30,
                                color = color.white,
                                top = 5,
                                right = games.screen.width - 10,
                                is_collideable = False)
        games.screen.add(self.score)
        # create player's ship at the center of the screen
        self.ship = Ship(game = self,
                         x = games.screen.width/2,
                         y = games.screen.height/2)
        games.screen.add(self.ship)
    def play(self):
        """ Play the game: start music, set background, enter level 1, loop. """
        # begin theme music, looping forever
        games.music.load("theme.mid")
        games.music.play(-1)
        # load and set background
        nebula_image = games.load_image("nebula.jpg")
        games.screen.background = nebula_image
        # advance to level 1
        self.advance()
        # start play
        games.screen.mainloop()
    def advance(self):
        """ Advance to the next game level, spawning one large asteroid per level. """
        self.level += 1
        # amount of space around ship to preserve when creating asteroids
        BUFFER = 150
        # create new asteroids
        for i in range(self.level):
            # calculate an x and y at least BUFFER distance from the ship
            # choose minimum distance along x-axis and y-axis
            x_min = random.randrange(BUFFER)
            y_min = BUFFER - x_min
            # choose distance along x-axis and y-axis based on minimum distance
            x_distance = random.randrange(x_min, games.screen.width - x_min)
            y_distance = random.randrange(y_min, games.screen.height - y_min)
            # calculate location based on distance
            x = self.ship.x + x_distance
            y = self.ship.y + y_distance
            # wrap around screen, if necessary
            x %= games.screen.width
            y %= games.screen.height
            # create the asteroid
            new_asteroid = Asteroid(game = self,
                                    x = x, y = y,
                                    size = Asteroid.LARGE)
            games.screen.add(new_asteroid)
        # display level number near the top of the screen
        # (fixed: y used games.screen.width/10, tying the vertical position to
        # the screen's width; it now scales with the height like other y coords)
        level_message = games.Message(value = "Level " + str(self.level),
                                      size = 40,
                                      color = color.yellow,
                                      x = games.screen.width/2,
                                      y = games.screen.height/10,
                                      lifetime = 3 * games.screen.fps,
                                      is_collideable = False)
        games.screen.add(level_message)
        # play new level sound (except at first level)
        if self.level > 1:
            self.sound.play()
    def end(self):
        """ End the game: show 'Game Over' for 5 seconds, then quit. """
        end_message = games.Message(value = "Game Over",
                                    size = 90,
                                    color = color.red,
                                    x = games.screen.width/2,
                                    y = games.screen.height/2,
                                    lifetime = 5 * games.screen.fps,
                                    after_death = games.screen.quit,
                                    is_collideable = False)
        games.screen.add(end_message)
def main():
    """ Create the Game and start play. """
    Game().play()
# kick it off: create the Game object and enter the main loop
main()
| {
"repo_name": "bohdan-shramko/learning-python",
"path": "source/chapter12/astrocrash08.py",
"copies": "1",
"size": "10681",
"license": "mit",
"hash": -9032661210230572000,
"line_mean": 32.0681114551,
"line_max": 81,
"alpha_frac": 0.5066005056,
"autogenerated": false,
"ratio": 3.9196330275229356,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9858033148371732,
"avg_score": 0.0136400769502407,
"num_lines": 323
} |
"""Astroid hooks for the Python 2 GObject introspection bindings.
Helps with understanding everything imported from 'gi.repository'
"""
import inspect
import itertools
import sys
import re
from astroid import MANAGER, AstroidBuildingException
from astroid.builder import AstroidBuilder
# cache of already-inspected gi modules: modname -> astroid module (None = failed)
_inspected_modules = {}
# pattern matching a valid python identifier
_identifier_re = r'^[A-Za-z_]\w*$'
def _gi_build_stub(parent):
"""
Inspect the passed module recursively and build stubs for functions,
classes, etc.
"""
classes = {}
functions = {}
constants = {}
methods = {}
for name in dir(parent):
if name.startswith("__"):
continue
# Check if this is a valid name in python
if not re.match(_identifier_re, name):
continue
try:
obj = getattr(parent, name)
except:
continue
if inspect.isclass(obj):
classes[name] = obj
elif (inspect.isfunction(obj) or
inspect.isbuiltin(obj)):
functions[name] = obj
elif (inspect.ismethod(obj) or
inspect.ismethoddescriptor(obj)):
methods[name] = obj
elif type(obj) in [int, str]:
constants[name] = obj
elif (str(obj).startswith("<flags") or
str(obj).startswith("<enum ") or
str(obj).startswith("<GType ") or
inspect.isdatadescriptor(obj)):
constants[name] = 0
elif callable(obj):
# Fall back to a function for anything callable
functions[name] = obj
else:
# Assume everything else is some manner of constant
constants[name] = 0
ret = ""
if constants:
ret += "# %s contants\n\n" % parent.__name__
for name in sorted(constants):
if name[0].isdigit():
# GDK has some busted constant names like
# Gdk.EventType.2BUTTON_PRESS
continue
val = constants[name]
strval = str(val)
if type(val) is str:
strval = '"%s"' % str(val).replace("\\", "\\\\")
ret += "%s = %s\n" % (name, strval)
if ret:
ret += "\n\n"
if functions:
ret += "# %s functions\n\n" % parent.__name__
for name in sorted(functions):
func = functions[name]
ret += "def %s(*args, **kwargs):\n" % name
ret += " pass\n"
if ret:
ret += "\n\n"
if methods:
ret += "# %s methods\n\n" % parent.__name__
for name in sorted(methods):
func = methods[name]
ret += "def %s(self, *args, **kwargs):\n" % name
ret += " pass\n"
if ret:
ret += "\n\n"
if classes:
ret += "# %s classes\n\n" % parent.__name__
for name in sorted(classes):
ret += "class %s(object):\n" % name
classret = _gi_build_stub(classes[name])
if not classret:
classret = "pass\n"
for line in classret.splitlines():
ret += " " + line + "\n"
ret += "\n"
return ret
def _import_gi_module(modname):
    """Failed-import hook: build an astroid module for 'gi.repository.*'
    modules by importing them for real and generating stub source.

    Results (including failures, stored as None) are cached in
    _inspected_modules.  Raises AstroidBuildingException for non-gi module
    names and for modules whose import failed.
    """
    # we only consider gi.repository submodules
    if not modname.startswith('gi.repository.'):
        raise AstroidBuildingException()
    # build astroid representation unless we already tried so
    if modname not in _inspected_modules:
        modnames = [modname]
        optional_modnames = []

        # GLib and GObject may have some special case handling
        # in pygobject that we need to cope with. However at
        # least as of pygobject3-3.13.91 the _glib module doesn't
        # exist anymore, so if treat these modules as optional.
        if modname == 'gi.repository.GLib':
            optional_modnames.append('gi._glib')
        elif modname == 'gi.repository.GObject':
            optional_modnames.append('gi._gobject')

        try:
            modcode = ''
            for m in itertools.chain(modnames, optional_modnames):
                try:
                    __import__(m)
                    # append this module's stub source to the combined stub
                    modcode += _gi_build_stub(sys.modules[m])
                except ImportError:
                    # optional modules may legitimately be missing;
                    # a missing required module aborts the whole build
                    if m not in optional_modnames:
                        raise
        except ImportError:
            # cache the failure so we don't retry the import on every lookup
            astng = _inspected_modules[modname] = None
        else:
            # parse the generated stub source into an astroid module and cache it
            astng = AstroidBuilder(MANAGER).string_build(modcode, modname)
            _inspected_modules[modname] = astng
    else:
        astng = _inspected_modules[modname]
    if astng is None:
        raise AstroidBuildingException('Failed to import module %r' % modname)
    return astng
MANAGER.register_failed_import_hook(_import_gi_module)
| {
"repo_name": "kevinkindom/chrome_depto_tools",
"path": "third_party/logilab/astroid/brain/py2gi.py",
"copies": "66",
"size": "4619",
"license": "bsd-3-clause",
"hash": -1999952288602879500,
"line_mean": 28.8,
"line_max": 78,
"alpha_frac": 0.5537995237,
"autogenerated": false,
"ratio": 4.051754385964912,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""Astroid hooks for the Python 2 standard library.
Currently help understanding of :
* hashlib.md5 and hashlib.sha1
"""
import sys
from textwrap import dedent
from astroid import (
MANAGER, AsStringRegexpPredicate,
UseInferenceDefault, inference_tip,
YES, InferenceError)
from astroid import exceptions
from astroid import nodes
from astroid.builder import AstroidBuilder
# module name -> transform function; populated further down in this file
MODULE_TRANSFORMS = {}
# interpreter version flags used to pick the right stub signatures
PY3K = sys.version_info > (3, 0)
PY33 = sys.version_info >= (3, 3)
# general function
def infer_func_form(node, base_type, context=None, enum=False):
    """Specific inference function for namedtuple or Python 3 enum.

    Expects ``node`` to be a two-argument CallFunc (class name, attribute
    spec) and returns a (class_node, name, attributes) triple.  Raises
    UseInferenceDefault when the call does not match the expected shape.
    """
    def infer_first(node):
        # infer a single value for the node, bailing out on ambiguity
        try:
            value = node.infer(context=context).next()
            if value is YES:
                raise UseInferenceDefault()
            else:
                return value
        except StopIteration:
            raise InferenceError()

    # node is a CallFunc node, class name as first argument and generated class
    # attributes as second argument
    if len(node.args) != 2:
        # something weird here, go back to class implementation
        raise UseInferenceDefault()
    # namedtuple or enums list of attributes can be a list of strings or a
    # whitespace-separate string
    try:
        name = infer_first(node.args[0]).value
        names = infer_first(node.args[1])
        try:
            attributes = names.value.replace(',', ' ').split()
        except AttributeError:
            if not enum:
                attributes = [infer_first(const).value for const in names.elts]
            else:
                # Enums supports either iterator of (name, value) pairs
                # or mappings.
                # TODO: support only list, tuples and mappings.
                if hasattr(names, 'items') and isinstance(names.items, list):
                    attributes = [infer_first(const[0]).value
                                  for const in names.items
                                  if isinstance(const[0], nodes.Const)]
                elif hasattr(names, 'elts'):
                    # Enums can support either ["a", "b", "c"]
                    # or [("a", 1), ("b", 2), ...], but they can't
                    # be mixed.
                    if all(isinstance(const, nodes.Tuple)
                           for const in names.elts):
                        attributes = [infer_first(const.elts[0]).value
                                      for const in names.elts
                                      if isinstance(const, nodes.Tuple)]
                    else:
                        attributes = [infer_first(const).value
                                      for const in names.elts]
                else:
                    raise AttributeError
            if not attributes:
                raise AttributeError
    except (AttributeError, exceptions.InferenceError) as exc:
        # any malformed spec falls back to the default class inference
        raise UseInferenceDefault()
    # we want to return a Class node instance with proper attributes set
    class_node = nodes.Class(name, 'docstring')
    class_node.parent = node.parent
    # set base class=tuple
    class_node.bases.append(base_type)
    # XXX add __init__(*attributes) method
    for attr in attributes:
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        class_node.instance_attrs[attr] = [fake_node]
    return class_node, name, attributes
# module specific transformation functions #####################################
def transform(module):
    """ Apply the registered transform for this module, if one exists. """
    handler = MODULE_TRANSFORMS.get(module.name)
    if handler is not None:
        handler(module)
# run the per-module transforms each time astroid builds a Module node
MANAGER.register_transform(nodes.Module, transform)
# module specific transformation functions #####################################
def hashlib_transform(module):
    """Give hashlib's C-implemented hash constructors python-level stubs."""
    # minimal hash-object API (digest/update/hexdigest) for each algorithm
    template = '''

class %s(object):
  def __init__(self, value=''): pass
  def digest(self):
    return u''
  def update(self, value): pass
  def hexdigest(self):
    return u''
'''
    algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512')
    classes = "".join(template % hashfunc for hashfunc in algorithms)

    fake = AstroidBuilder(MANAGER).string_build(classes)

    # graft each stub class into the real hashlib module node
    for hashfunc in algorithms:
        module.locals[hashfunc] = fake.locals[hashfunc]
def collections_transform(module):
    """Provide python-level stubs for collections' C classes (deque, defaultdict)."""
    fake = AstroidBuilder(MANAGER).string_build('''

class defaultdict(dict):
    default_factory = None
    def __missing__(self, key): pass

class deque(object):
    maxlen = 0
    def __init__(self, iterable=None, maxlen=None): pass
    def append(self, x): pass
    def appendleft(self, x): pass
    def clear(self): pass
    def count(self, x): return 0
    def extend(self, iterable): pass
    def extendleft(self, iterable): pass
    def pop(self): pass
    def popleft(self): pass
    def remove(self, value): pass
    def reverse(self): pass
    def rotate(self, n): pass
    def __iter__(self): return self

''')

    # graft the stub classes into the real collections module node
    for klass in ('deque', 'defaultdict'):
        module.locals[klass] = fake.locals[klass]
def pkg_resources_transform(module):
    """Stub out pkg_resources' dynamically-created resource API functions."""
    fake = AstroidBuilder(MANAGER).string_build('''

def resource_exists(package_or_requirement, resource_name):
    pass

def resource_isdir(package_or_requirement, resource_name):
    pass

def resource_filename(package_or_requirement, resource_name):
    pass

def resource_stream(package_or_requirement, resource_name):
    pass

def resource_string(package_or_requirement, resource_name):
    pass

def resource_listdir(package_or_requirement, resource_name):
    pass

def extraction_error():
    pass

def get_cache_path(archive_name, names=()):
    pass

def postprocess(tempname, filename):
    pass

def set_extraction_path(path):
    pass

def cleanup_resources(force=False):
    pass

''')

    # graft every stub function into the real pkg_resources module node
    for func_name, func in fake.locals.items():
        module.locals[func_name] = func
def subprocess_transform(module):
    """Stub out subprocess.Popen with a version-appropriate signature."""
    # communicate() returns bytes on py3, str on py2; py3's __init__ grew
    # restore_signals / start_new_session / pass_fds
    if PY3K:
        communicate = (bytes('string', 'ascii'), bytes('string', 'ascii'))
        init = """
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0, restore_signals=True,
                 start_new_session=False, pass_fds=()):
        pass
    """
    else:
        communicate = ('string', 'string')
        init = """
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0):
        pass
    """
    # wait() gained a timeout parameter in 3.3
    if PY33:
        wait_signature = 'def wait(self, timeout=None)'
    else:
        wait_signature = 'def wait(self)'
    fake = AstroidBuilder(MANAGER).string_build('''

class Popen(object):
    returncode = pid = 0
    stdin = stdout = stderr = file()

    %(init)s

    def communicate(self, input=None):
        return %(communicate)r
    %(wait_signature)s:
        return self.returncode
    def poll(self):
        return self.returncode
    def send_signal(self, signal):
        pass
    def terminate(self):
        pass
    def kill(self):
        pass
   ''' % {'init': init,
          'communicate': communicate,
          'wait_signature': wait_signature})

    # graft the stub Popen (and anything else built) into the real module node
    for func_name, func in fake.locals.items():
        module.locals[func_name] = func
# wire up the per-module transforms defined above
MODULE_TRANSFORMS['hashlib'] = hashlib_transform
MODULE_TRANSFORMS['collections'] = collections_transform
MODULE_TRANSFORMS['pkg_resources'] = pkg_resources_transform
MODULE_TRANSFORMS['subprocess'] = subprocess_transform

# namedtuple support ###########################################################
def infer_named_tuple(node, context=None):
    """Specific inference function for namedtuple CallFunc node"""
    # build the class skeleton (tuple subclass with the declared fields)
    class_node, name, attributes = infer_func_form(node, nodes.Tuple._proxied,
                                                   context=context)
    # synthesize the namedtuple helper API so attribute lookups succeed
    fake = AstroidBuilder(MANAGER).string_build('''
class %(name)s(tuple):
    _fields = %(fields)r
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(_self, **kwds):
        result = _self._make(map(kwds.pop, %(fields)r, _self))
        if kwds:
            raise ValueError('Got unexpected field names: %%r' %% list(kwds))
        return result
    ''' % {'name': name, 'fields': attributes})
    # graft the synthesized members onto the inferred class
    class_node.locals['_asdict'] = fake.body[0].locals['_asdict']
    class_node.locals['_make'] = fake.body[0].locals['_make']
    class_node.locals['_replace'] = fake.body[0].locals['_replace']
    class_node.locals['_fields'] = fake.body[0].locals['_fields']
    # we use UseInferenceDefault, we can't be a generator so return an iterator
    return iter([class_node])
def infer_enum(node, context=None):
    """ Specific inference function for enum CallFunc node. """
    # enums use a synthetic metaclass-like base instead of tuple
    enum_meta = nodes.Class("EnumMeta", 'docstring')
    class_node, _, _ = infer_func_form(node, enum_meta,
                                       context=context, enum=True)
    return iter([class_node.instanciate_class()])
def infer_enum_class(node, context=None):
    """ Specific inference for enums.

    For classes deriving from Enum/IntEnum, replaces each member assignment
    with a mocked instance exposing .value and .name properties.
    """
    names = set(('Enum', 'IntEnum', 'enum.Enum', 'enum.IntEnum'))
    for basename in node.basenames:
        # TODO: doesn't handle subclasses yet.
        if basename not in names:
            continue
        if node.root().name == 'enum':
            # Skip if the class is directly from enum module.
            break
        for local, values in node.locals.items():
            # only plain `NAME = value` assignments are enum members
            if any(not isinstance(value, nodes.AssName)
                   for value in values):
                continue

            parent = values[0].parent
            real_value = parent.value
            new_targets = []
            for target in parent.targets:
                # Replace all the assignments with our mocked class.
                classdef = dedent('''
                class %(name)s(object):
                    @property
                    def value(self):
                        return %(value)s
                    @property
                    def name(self):
                        return %(name)r
                    %(name)s = %(value)s
                ''' % {'name': target.name,
                       'value': real_value.as_string()})
                fake = AstroidBuilder(MANAGER).string_build(classdef)[target.name]
                fake.parent = target.parent
                # members also see the enum class's own methods
                for method in node.mymethods():
                    fake.locals[method.name] = [method]
                new_targets.append(fake.instanciate_class())
            node.locals[local] = new_targets
        break
    return node
# hook namedtuple(...) and Enum(...) call sites, plus enum subclass bodies
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_named_tuple),
                           AsStringRegexpPredicate('namedtuple', 'func'))
MANAGER.register_transform(nodes.CallFunc, inference_tip(infer_enum),
                           AsStringRegexpPredicate('Enum', 'func'))
MANAGER.register_transform(nodes.Class, infer_enum_class)
| {
"repo_name": "CoherentLabs/depot_tools",
"path": "third_party/logilab/astroid/brain/py2stdlib.py",
"copies": "1",
"size": "11271",
"license": "bsd-3-clause",
"hash": 8189697102692949000,
"line_mean": 33.0513595166,
"line_max": 82,
"alpha_frac": 0.5857510425,
"autogenerated": false,
"ratio": 4.222929936305732,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5308680978805732,
"avg_score": null,
"num_lines": null
} |
"""Astroid hooks for various builtins."""
import sys
from functools import partial
from textwrap import dedent
import six
from astroid import (MANAGER, UseInferenceDefault,
inference_tip, YES, InferenceError, UnresolvableName)
from astroid import nodes
from astroid.builder import AstroidBuilder
def _extend_str(class_node, rvalue):
    """function to extend builtin str/unicode class

    *rvalue* is the literal ("''", "u''" or "b''") each string-returning
    stub method should return.
    """
    # TODO(cpopa): this approach will make astroid to believe
    # that some arguments can be passed by keyword, but
    # unfortunately, strings and bytes don't accept keyword arguments.
    code = dedent('''
    class whatever(object):
        def join(self, iterable):
            return {rvalue}
        def replace(self, old, new, count=None):
            return {rvalue}
        def format(self, *args, **kwargs):
            return {rvalue}
        def encode(self, encoding='ascii', errors=None):
            return ''
        def decode(self, encoding='ascii', errors=None):
            return u''
        def capitalize(self):
            return {rvalue}
        def title(self):
            return {rvalue}
        def lower(self):
            return {rvalue}
        def upper(self):
            return {rvalue}
        def swapcase(self):
            return {rvalue}
        def index(self, sub, start=None, end=None):
            return 0
        def find(self, sub, start=None, end=None):
            return 0
        def count(self, sub, start=None, end=None):
            return 0
        def strip(self, chars=None):
            return {rvalue}
        def lstrip(self, chars=None):
            return {rvalue}
        def rstrip(self, chars=None):
            return {rvalue}
        def rjust(self, width, fillchar=None):
            return {rvalue}
        def center(self, width, fillchar=None):
            return {rvalue}
        def ljust(self, width, fillchar=None):
            return {rvalue}
    ''')
    # substitute the concrete return literal into every stub method
    code = code.format(rvalue=rvalue)
    fake = AstroidBuilder(MANAGER).string_build(code)['whatever']
    # graft each stub method onto the real builtin class node
    for method in fake.mymethods():
        class_node.locals[method.name] = [method]
        method.parent = class_node
def extend_builtins(class_transforms):
    """ Apply each transform to its class in astroid's cached builtins module. """
    from astroid.bases import BUILTINS
    builtins_ast = MANAGER.astroid_cache[BUILTINS]
    for class_name in class_transforms:
        class_transforms[class_name](builtins_ast[class_name])
# str/bytes exist on py3; str/unicode on py2 -- extend whichever pair applies
if sys.version_info > (3, 0):
    extend_builtins({'bytes': partial(_extend_str, rvalue="b''"),
                     'str': partial(_extend_str, rvalue="''")})
else:
    extend_builtins({'str': partial(_extend_str, rvalue="''"),
                     'unicode': partial(_extend_str, rvalue="u''")})
def register_builtin_transform(transform, builtin_name):
    """Register a new transform function for the given *builtin_name*.

    The transform function must accept two parameters, a node and
    an optional context.
    """
    def _transform_wrapper(node, context=None):
        result = transform(node, context=context)
        if result:
            # anchor the synthesized node at the original call site
            result.parent = node
            result.lineno = node.lineno
            result.col_offset = node.col_offset
        return iter([result])

    # fire only on direct calls to the named builtin, e.g. `tuple(...)`
    MANAGER.register_transform(nodes.CallFunc,
                               inference_tip(_transform_wrapper),
                               lambda n: (isinstance(n.func, nodes.Name) and
                                          n.func.name == builtin_name))
def _generic_inference(node, context, node_type, transform):
    """Infer a builtin-container call as a *node_type* instance.

    Raises UseInferenceDefault when the call has more than one argument
    or the argument cannot be converted by *transform*.
    """
    call_args = node.args
    if not call_args:
        # e.g. ``list()`` -- an empty container of the requested type.
        return node_type()
    if len(call_args) > 1:
        raise UseInferenceDefault()
    first = call_args[0]
    result = transform(first)
    if not result:
        # The raw argument node was not usable; try its inferred value.
        try:
            inferred_arg = next(first.infer(context=context))
        except (InferenceError, StopIteration):
            raise UseInferenceDefault()
        if inferred_arg is YES:
            raise UseInferenceDefault()
        result = transform(inferred_arg)
    if not result or result is YES:
        raise UseInferenceDefault()
    return result
def _generic_transform(arg, klass, iterables, build_elts):
    """Convert *arg* (an AST node) into a *klass* container node.

    Returns None when *arg* is of a kind this helper does not handle,
    letting the caller fall back to inference on the argument's value.
    """
    if isinstance(arg, klass):
        return arg
    if isinstance(arg, iterables):
        # TODO(cpopa): Don't support heterogenous elements.
        # Not yet, though.
        if not all(isinstance(elt, nodes.Const) for elt in arg.elts):
            raise UseInferenceDefault()
        values = [elt.value for elt in arg.elts]
    elif isinstance(arg, nodes.Dict):
        # Only the keys survive conversion (dict iteration yields keys).
        if not all(isinstance(item[0], nodes.Const) for item in arg.items):
            raise UseInferenceDefault()
        values = [item[0].value for item in arg.items]
    elif (isinstance(arg, nodes.Const) and
          isinstance(arg.value, (six.string_types, six.binary_type))):
        values = arg.value
    else:
        return None
    return klass(elts=build_elts(values))
def _infer_builtin(node, context,
                   klass=None, iterables=None,
                   build_elts=None):
    """Shared driver for tuple/list/set call inference.

    Binds the container parameters into a transform and delegates to
    the generic inference machinery.
    """
    return _generic_inference(
        node, context, klass,
        partial(_generic_transform,
                klass=klass,
                iterables=iterables,
                build_elts=build_elts))
# pylint: disable=invalid-name
# Pre-configured inference helpers: each converts a ``tuple(...)``,
# ``list(...)`` or ``set(...)`` call into the matching container node.
infer_tuple = partial(
    _infer_builtin,
    klass=nodes.Tuple,
    iterables=(nodes.List, nodes.Set),
    build_elts=tuple)
infer_list = partial(
    _infer_builtin,
    klass=nodes.List,
    iterables=(nodes.Tuple, nodes.Set),
    build_elts=list)
infer_set = partial(
    _infer_builtin,
    klass=nodes.Set,
    iterables=(nodes.List, nodes.Tuple),
    build_elts=set)
def _get_elts(arg, context):
    """Return the key/value item pairs for a ``dict(...)`` argument.

    *arg* must infer to a Dict (whose items are returned directly) or
    to an iterable of two-element iterables; anything else raises
    UseInferenceDefault.
    """
    def _is_iterable(n):
        return isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))

    try:
        inferred = next(arg.infer(context))
    except (InferenceError, UnresolvableName):
        raise UseInferenceDefault()
    if isinstance(inferred, nodes.Dict):
        return inferred.items
    if _is_iterable(inferred):
        pairs = []
        for elt in inferred.elts:
            # Each element must be a pair whose first item is hashable
            # (tuple, const or name); otherwise fall back to the
            # default inference.
            if not _is_iterable(elt):
                raise UseInferenceDefault()
            if len(elt.elts) != 2:
                raise UseInferenceDefault()
            if not isinstance(elt.elts[0],
                              (nodes.Tuple, nodes.Const, nodes.Name)):
                raise UseInferenceDefault()
            pairs.append(tuple(elt.elts))
        return pairs
    raise UseInferenceDefault()
def infer_dict(node, context=None):
    """Try to infer a dict call to a Dict node.

    The function treats the following cases:
        * dict()
        * dict(mapping)
        * dict(iterable)
        * dict(iterable, **kwargs)
        * dict(mapping, **kwargs)
        * dict(**kwargs)
    If a case can't be infered, we'll fallback to default inference.
    """
    def _all_keywords(args):
        return all(isinstance(arg, nodes.Keyword) for arg in args)

    if not node.args and not node.kwargs:
        # dict()
        return nodes.Dict()
    if node.args and _all_keywords(node.args):
        # dict(a=1, b=2, c=4)
        items = [(nodes.Const(arg.arg), arg.value) for arg in node.args]
    elif len(node.args) >= 2 and _all_keywords(node.args[1:]):
        # dict(some_iterable, b=2, c=4)
        elts = _get_elts(node.args[0], context)
        kw_items = [(nodes.Const(arg.arg), arg.value)
                    for arg in node.args[1:]]
        # Build a new list rather than extending ``elts`` in place,
        # since _get_elts may hand back an inferred Dict's own items.
        items = elts + kw_items
    elif len(node.args) == 1:
        items = _get_elts(node.args[0], context)
    else:
        raise UseInferenceDefault()
    result = nodes.Dict()
    result.items = items
    return result
# Builtins inference
# Hook the container-call helpers into the manager so that calls such as
# ``tuple([1, 2])`` or ``dict(a=1)`` are inferred as container nodes.
register_builtin_transform(infer_tuple, 'tuple')
register_builtin_transform(infer_set, 'set')
register_builtin_transform(infer_list, 'list')
register_builtin_transform(infer_dict, 'dict')
| {
"repo_name": "Shouqun/node-gn",
"path": "tools/depot_tools/third_party/logilab/astroid/brain/builtin_inference.py",
"copies": "64",
"size": "8170",
"license": "mit",
"hash": -4434270280445436400,
"line_mean": 32.3469387755,
"line_max": 76,
"alpha_frac": 0.5955936353,
"autogenerated": false,
"ratio": 3.9525882922109337,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": null,
"num_lines": null
} |
"""AstronClientRepository module: contains the AstronClientRepository class"""
from direct.directnotify import DirectNotifyGlobal
from ClientRepositoryBase import ClientRepositoryBase
from MsgTypes import *
from direct.distributed.PyDatagram import PyDatagram
from pandac.PandaModules import STUint16, STUint32
class AstronClientRepository(ClientRepositoryBase):
    """
    The Astron implementation of a clients repository for
    communication with an Astron ClientAgent.
    This repo will emit events for:
    * CLIENT_HELLO_RESP
    * CLIENT_EJECT ( error_code, reason )
    * CLIENT_OBJECT_LEAVING ( do_id )
    * CLIENT_ADD_INTEREST ( context, interest_id, parent_id, zone_id )
    * CLIENT_ADD_INTEREST_MULTIPLE ( icontext, interest_id, parent_id, [zone_ids] )
    * CLIENT_REMOVE_INTEREST ( context, interest_id )
    * CLIENT_DONE_INTEREST_RESP ( context, interest_id )
    * LOST_CONNECTION ()
    """
    notify = DirectNotifyGlobal.directNotify.newCategory("ClientRepository")
    # This is required by DoCollectionManager, even though it's not
    # used by this implementation.
    GameGlobalsId = 0
    def __init__(self, *args, **kwargs):
        """Initialize the base repository, hook our shutdown into the
        application's exit sequence, and build the dispatch table that
        maps incoming message types to handler methods."""
        ClientRepositoryBase.__init__(self, *args, **kwargs)
        # `base` is the ShowBase global provided by the framework.
        base.finalExitCallbacks.append(self.shutdown)
        self.message_handlers = {CLIENT_HELLO_RESP: self.handleHelloResp,
                                 CLIENT_EJECT: self.handleEject,
                                 CLIENT_ENTER_OBJECT_REQUIRED: self.handleEnterObjectRequired,
                                 CLIENT_ENTER_OBJECT_REQUIRED_OWNER: self.handleEnterObjectRequiredOwner,
                                 CLIENT_OBJECT_SET_FIELD: self.handleUpdateField,
                                 CLIENT_OBJECT_SET_FIELDS: self.handleUpdateFields,
                                 CLIENT_OBJECT_LEAVING: self.handleObjectLeaving,
                                 CLIENT_OBJECT_LOCATION: self.handleObjectLocation,
                                 CLIENT_ADD_INTEREST: self.handleAddInterest,
                                 CLIENT_ADD_INTEREST_MULTIPLE: self.handleAddInterestMultiple,
                                 CLIENT_REMOVE_INTEREST: self.handleRemoveInterest,
                                 CLIENT_DONE_INTEREST_RESP: self.handleInterestDoneMessage,
                                 }
    #
    # Message Handling
    #
    def handleDatagram(self, di):
        """Dispatch an incoming datagram to the handler registered for
        its message type. Unknown types are reported via notify.error."""
        msgType = self.getMsgType()
        # self.handleMessageType(msgType, di)
        #
        #def handleMessageType(self, msgType, di):
        if msgType in self.message_handlers:
            self.message_handlers[msgType](di)
        else:
            self.notify.error("Got unknown message type %d!" % (msgType,))
        self.considerHeartbeat()
    def handleHelloResp(self, di):
        """Broadcast that the ClientAgent accepted our hello."""
        messenger.send("CLIENT_HELLO_RESP", [])
    def handleEject(self, di):
        """Read the eject code and reason and re-broadcast them."""
        error_code = di.get_uint16()
        reason = di.get_string()
        messenger.send("CLIENT_EJECT", [error_code, reason])
    def handleEnterObjectRequired(self, di):
        """Generate a distributed object from a REQUIRED-fields message."""
        do_id = di.getArg(STUint32)
        parent_id = di.getArg(STUint32)
        zone_id = di.getArg(STUint32)
        dclass_id = di.getArg(STUint16)
        dclass = self.dclassesByNumber[dclass_id]
        self.generateWithRequiredFields(dclass, do_id, di, parent_id, zone_id)
    def handleEnterObjectRequiredOwner(self, di):
        """Generate the owner view of a distributed object.

        NOTE(review): parentId/zoneId are read from the stream but not
        forwarded to generateWithRequiredFieldsOwner -- confirm whether
        the owner view is intentionally location-less."""
        avatar_doId = di.getArg(STUint32)
        parentId = di.getArg(STUint32)
        zoneId = di.getArg(STUint32)
        dclass_id = di.getArg(STUint16)
        dclass = self.dclassesByNumber[dclass_id]
        self.generateWithRequiredFieldsOwner(dclass, avatar_doId, di)
    def generateWithRequiredFieldsOwner(self, dclass, doId, di):
        """Create or refresh the owner-view object for doId.

        Looks the object up in doId2ownerView, then in the owner cache,
        and finally constructs a fresh instance from the dclass's owner
        class definition."""
        if doId in self.doId2ownerView:
            # ...it is in our dictionary.
            # Just update it.
            # NOTE(review): notify.error is reached before the update --
            # if error raises, the update code below is dead; confirm.
            self.notify.error('duplicate owner generate for %s (%s)' % (
                doId, dclass.getName()))
            distObj = self.doId2ownerView[doId]
            assert distObj.dclass == dclass
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        elif self.cacheOwner.contains(doId):
            # ...it is in the cache.
            # Pull it out of the cache:
            distObj = self.cacheOwner.retrieve(doId)
            assert distObj.dclass == dclass
            # put it in the dictionary:
            self.doId2ownerView[doId] = distObj
            # and update it.
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        else:
            # ...it is not in the dictionary or the cache.
            # Construct a new one
            classDef = dclass.getOwnerClassDef()
            if classDef == None:
                self.notify.error("Could not create an undefined %s object. Have you created an owner view?" % (dclass.getName()))
            distObj = classDef(self)
            distObj.dclass = dclass
            # Assign it an Id
            distObj.doId = doId
            # Put the new do in the dictionary
            self.doId2ownerView[doId] = distObj
            # Update the required fields
            distObj.generateInit()  # Only called when constructed
            distObj.generate()
            distObj.updateRequiredFields(dclass, di)
            # updateRequiredFields calls announceGenerate
        return distObj
    def handleUpdateFields(self, di):
        """Handler for CLIENT_OBJECT_SET_FIELDS -- not yet implemented."""
        # Can't test this without the server actually sending it.
        self.notify.error("CLIENT_OBJECT_SET_FIELDS not implemented!")
        # # Here's some tentative code and notes:
        # do_id = di.getUint32()
        # field_count = di.getUint16()
        # for i in range(0, field_count):
        #     field_id = di.getUint16()
        #     field = self.get_dc_file().get_field_by_index(field_id)
        #     # print(type(field))
        #     # print(field)
        #     # FIXME: Get field type, unpack value, create and send message.
        #     # value = di.get?()
        #     # Assemble new message
    def handleObjectLeaving(self, di):
        """Delete the local object for a CLIENT_OBJECT_LEAVING message
        and broadcast the departure.

        NOTE(review): doId2do.get() returns None for an unknown doId, in
        which case dist_obj.delete() would raise AttributeError --
        confirm the server never announces unknown objects."""
        do_id = di.get_uint32()
        dist_obj = self.doId2do.get(do_id)
        dist_obj.delete()
        self.deleteObject(do_id)
        messenger.send("CLIENT_OBJECT_LEAVING", [do_id])
    def handleAddInterest(self, di):
        """Re-broadcast a single-zone interest announcement."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        parent_id = di.get_uint32()
        zone_id = di.get_uint32()
        messenger.send("CLIENT_ADD_INTEREST", [context, interest_id, parent_id, zone_id])
    def handleAddInterestMultiple(self, di):
        """Re-broadcast a multi-zone interest announcement; the zone
        count precedes the zone ids on the wire."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        parent_id = di.get_uint32()
        zone_ids = [di.get_uint32() for i in range(0, di.get_uint16())]
        messenger.send("CLIENT_ADD_INTEREST_MULTIPLE", [context, interest_id, parent_id, zone_ids])
    def handleRemoveInterest(self, di):
        """Re-broadcast an interest-removal announcement."""
        context = di.get_uint32()
        interest_id = di.get_uint16()
        messenger.send("CLIENT_REMOVE_INTEREST", [context, interest_id])
    def deleteObject(self, doId):
        """
        implementation copied from ClientRepository.py
        Removes the object from the client's view of the world. This
        should normally not be called directly except in the case of
        error recovery, since the server will normally be responsible
        for deleting and disabling objects as they go out of scope.
        After this is called, future updates by server on this object
        will be ignored (with a warning message). The object will
        become valid again the next time the server sends a generate
        message for this doId.
        This is not a distributed message and does not delete the
        object on the server or on any other client.
        """
        if doId in self.doId2do:
            # If it is in the dictionary, remove it.
            obj = self.doId2do[doId]
            # Remove it from the dictionary
            del self.doId2do[doId]
            # Disable, announce, and delete the object itself...
            # unless delayDelete is on...
            obj.deleteOrDelay()
            if self.isLocalId(doId):
                self.freeDoId(doId)
        elif self.cache.contains(doId):
            # If it is in the cache, remove it.
            self.cache.delete(doId)
            if self.isLocalId(doId):
                self.freeDoId(doId)
        else:
            # Otherwise, ignore it
            self.notify.warning(
                "Asked to delete non-existent DistObj " + str(doId))
    #
    # Sending messages
    #
    def sendUpdate(self, distObj, fieldName, args):
        """ Sends a normal update for a single field. """
        dg = distObj.dclass.clientFormatUpdate(
            fieldName, distObj.doId, args)
        self.send(dg)
    # FIXME: The version string should default to a .prc variable.
    def sendHello(self, version_string):
        """Send CLIENT_HELLO with the DC file hash and version string."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_HELLO)
        dg.add_uint32(self.get_dc_file().get_hash())
        dg.add_string(version_string)
        self.send(dg)
    def sendHeartbeat(self):
        """Send a CLIENT_HEARTBEAT keep-alive datagram."""
        datagram = PyDatagram()
        datagram.addUint16(CLIENT_HEARTBEAT)
        self.send(datagram)
    def sendAddInterest(self, context, interest_id, parent_id, zone_id):
        """Request interest in a single zone of a parent object."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_ADD_INTEREST)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        dg.add_uint32(parent_id)
        dg.add_uint32(zone_id)
        self.send(dg)
    def sendAddInterestMultiple(self, context, interest_id, parent_id, zone_ids):
        """Request interest in several zones; a uint16 count precedes
        the zone ids, mirroring handleAddInterestMultiple."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_ADD_INTEREST_MULTIPLE)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        dg.add_uint32(parent_id)
        dg.add_uint16(len(zone_ids))
        for zone_id in zone_ids:
            dg.add_uint32(zone_id)
        self.send(dg)
    def sendRemoveInterest(self, context, interest_id):
        """Ask the ClientAgent to drop a previously added interest."""
        dg = PyDatagram()
        dg.add_uint16(CLIENT_REMOVE_INTEREST)
        dg.add_uint32(context)
        dg.add_uint16(interest_id)
        self.send(dg)
    #
    # Other stuff
    #
    def lostConnection(self):
        """Broadcast that the connection to the server was lost."""
        messenger.send("LOST_CONNECTION")
    def disconnect(self):
        """
        This implicitly deletes all objects from the repository.
        """
        # NOTE(review): iterating .keys() while deleting relies on
        # Python 2's list-returning keys(); on Python 3 this would need
        # list(self.doId2do.keys()) -- confirm target interpreter.
        for do_id in self.doId2do.keys():
            self.deleteObject(do_id)
        ClientRepositoryBase.disconnect(self)
| {
"repo_name": "toontownfunserver/Panda3D-1.9.0",
"path": "direct/distributed/AstronClientRepository.py",
"copies": "4",
"size": "10641",
"license": "bsd-3-clause",
"hash": -127804528039695140,
"line_mean": 38.4111111111,
"line_max": 130,
"alpha_frac": 0.6046424208,
"autogenerated": false,
"ratio": 3.8345945945945945,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.006969018300316533,
"num_lines": 270
} |
"""Astronomical and physics constants.
This module complements constants defined in `astropy.constants`,
with gravitational paremeters and radii.
Note that `GM_jupiter` and `GM_neptune` are both referred to the whole planetary system gravitational parameter.
Unless otherwise specified, gravitational and mass parameters were obtained from:
* Luzum, Brian et al. “The IAU 2009 System of Astronomical Constants: The Report of the IAU Working Group on Numerical
Standards for Fundamental Astronomy.” Celestial Mechanics and Dynamical Astronomy 110.4 (2011): 293–304.
Crossref. Web. `DOI: 10.1007/s10569-011-9352-4`_
and radii were obtained from:
* Archinal, B. A. et al. “Report of the IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009.”
Celestial Mechanics and Dynamical Astronomy 109.2 (2010): 101–135. Crossref. Web. `DOI: 10.1007/s10569-010-9320-4`_
.. _`DOI: 10.1007/s10569-011-9352-4`: http://dx.doi.org/10.1007/s10569-011-9352-4
.. _`DOI: 10.1007/s10569-010-9320-4`: http://dx.doi.org/10.1007/s10569-010-9320-4
"""
from astropy.constants import Constant
from astropy import time
# Reference epoch J2000.0 in Barycentric Dynamical Time.
J2000 = time.Time('J2000', scale='tdb')
# Gravitational parameters (GM), in m3/s2.
# NOTE(review): the uncertainty magnitudes passed as the 5th argument are
# not obviously on the same scale as the values (e.g. 0.91 for
# GM_mercury ~ 2.2e13) -- confirm against the cited sources.
GM_sun = Constant('GM_sun', 'Heliocentric gravitational constant', 1.32712442099e20, 'm3 / (s2)', 0.0000000001e20,
                  'IAU 2009 system of astronomical constants', system='si')
GM_earth = Constant('GM_earth', 'Geocentric gravitational constant', 3.986004418e14, 'm3 / (s2)', 0.000000008e14,
                    'IAU 2009 system of astronomical constants', system='si')
# Anderson, John D. et al. “The Mass, Gravity Field, and Ephemeris of Mercury.” Icarus 71.3 (1987): 337–349.
# Crossref. Web. DOI: 10.1016/0019-1035(87)90033-9
GM_mercury = Constant('GM_mercury', 'Mercury gravitational constant', 2.203209e13, 'm3 / (s2)', 0.91,
                      'IAU 2009 system of astronomical constants', system='si')
# Konopliv, A.S., W.B. Banerdt, and W.L. Sjogren. “Venus Gravity: 180th Degree and Order Model.”
# Icarus 139.1 (1999): 3–18. Crossref. Web. DOI: 10.1006/icar.1999.6086
GM_venus = Constant('GM_venus', 'Venus gravitational constant', 3.24858592e14, 'm3 / (s2)', 0.006,
                    'IAU 2009 system of astronomical constants', system='si')
# Konopliv, Alex S. et al. “A Global Solution for the Mars Static and Seasonal Gravity, Mars Orientation, Phobos and
# Deimos Masses, and Mars Ephemeris.” Icarus 182.1 (2006): 23–50.
# Crossref. Web. DOI: 10.1016/j.icarus.2005.12.025
GM_mars = Constant('GM_mars', 'Mars gravitational constant', 4.282837440e13, 'm3 / (s2)', 0.00028,
                   'IAU 2009 system of astronomical constants', system='si')
# Jacobson, R. A. et al. “A comprehensive orbit reconstruction for the galileo prime mission in the JS200 system.”
# The Journal of the Astronautical Sciences 48.4 (2000): 495–516.
# Crossref. Web.
# GM_jupiter is the whole Jovian *system* parameter (planet + moons).
GM_jupiter = Constant('GM_jupiter', 'Jovian system gravitational constant', 1.2671276253e17, 'm3 / (s2)', 2.00,
                      'IAU 2009 system of astronomical constants', system='si')
# Jacobson, R. A. et al. “The Gravity Field of the Saturnian System from Satellite Observations and Spacecraft
# Tracking Data.” The Astronomical Journal 132.6 (2006): 2520–2526.
# Crossref. Web. DOI: 10.1086/508812
GM_saturn = Constant('GM_saturn', 'Saturn gravitational constant', 3.79312077e16, 'm3 / (s2)', 1.1,
                     'IAU 2009 system of astronomical constants', system='si')
# Jacobson, R. A. et al. “The Masses of Uranus and Its Major Satellites from Voyager Tracking Data and Earth-Based
# Uranian Satellite Data.” The Astronomical Journal 103 (1992): 2068.
# Crossref. Web. DOI: 10.1086/116211
GM_uranus = Constant('GM_uranus', 'Uranus gravitational constant', 5.7939393e15, 'm3 / (s2)', 13.0,
                     'IAU 2009 system of astronomical constants', system='si')
# Jacobson, R. A. “THE ORBITS OF THE NEPTUNIAN SATELLITES AND THE ORIENTATION OF THE POLE OF NEPTUNE.”
# The Astronomical Journal 137.5 (2009): 4322–4329. Crossref. Web. DOI:
# 10.1088/0004-6256/137/5/4322
# GM_neptune is the whole Neptunian *system* parameter (planet + moons).
GM_neptune = Constant('GM_neptune', 'Neptunian system gravitational constant', 6.836527100580397e15, 'm3 / (s2)', 10.0,
                      'IAU 2009 system of astronomical constants', system='si')
# Tholen, David J. et al. “MASSES OF NIX AND HYDRA.” The Astronomical Journal 135.3 (2008): 777–784. Crossref. Web.
# DOI: 10.1088/0004-6256/135/3/777
GM_pluto = Constant('GM_pluto', 'Pluto gravitational constant', 8.703e11, 'm3 / (s2)', 3.7,
                    'IAU 2009 system of astronomical constants', system='si')
# Lemoine, Frank G. et al. “High-Degree Gravity Models from GRAIL Primary Mission Data.”
# Journal of Geophysical Research: Planets 118.8 (2013): 1676–1698.
# Crossref. Web. DOI: 10.1002/jgre.20118
GM_moon = Constant('GM_moon', 'Moon gravitational constant', 4.90279981e12, 'm3 / (s2)', 0.00000774,
                   'Journal of Geophysical Research: Planets 118.8 (2013)', system='si')
# Equatorial radii, in m, from the IAU WG report (Archinal et al. 2010).
R_sun = Constant('R_sun', 'Sun equatorial radius', 6.96000e8, 'm', 0,
                 'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_earth = Constant('R_earth', 'Earth equatorial radius', 6.3781366e6, 'm', 0.0001,
                   'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_mercury = Constant('R_mercury', 'Mercury equatorial radius', 2.4397e6, 'm', 1.0,
                     'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_venus = Constant('R_venus', 'Venus equatorial radius', 6.0518e6, 'm', 1.0,
                   'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_mars = Constant('R_mars', 'Mars equatorial radius', 3.39619e6, 'm', 0.1,
                  'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_jupiter = Constant('R_jupiter', 'Jupiter equatorial radius', 7.1492e7, 'm', 4,
                     'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_saturn = Constant('R_saturn', 'Saturn equatorial radius', 6.0268e7, 'm', 4,
                    'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_uranus = Constant('R_uranus', 'Uranus equatorial radius', 2.5559e7, 'm', 4,
                    'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_neptune = Constant('R_neptune', 'Neptune equatorial radius', 2.4764e7, 'm', 15,
                     'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_pluto = Constant('R_pluto', 'Pluto effective radius', 1.195e6, 'm', 5,
                   'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
R_moon = Constant('R_moon', 'Moon equatorial radius', 1.7374e6, 'm', 1,
                  'IAU Working Group on Cartographic Coordinates and Rotational Elements: 2009', system='si')
| {
"repo_name": "anhiga/poliastro",
"path": "src/poliastro/constants.py",
"copies": "1",
"size": "7113",
"license": "mit",
"hash": 1182490510246761000,
"line_mean": 58.2352941176,
"line_max": 119,
"alpha_frac": 0.6876152646,
"autogenerated": false,
"ratio": 2.7938961553705908,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8963515721467232,
"avg_score": 0.0035991397006715797,
"num_lines": 119
} |
"""Astronomical coordinate functions."""
# p2.6+ compatibility
from __future__ import division, print_function, unicode_literals
# Python 2.6+ compatibility: on Python 3 the ``unicode``/``basestring``
# names do not exist, so alias them (and ``xrange``) to their Python 3
# equivalents.
try:
    unicode
except NameError:
    unicode = basestring = str
    xrange = range
import re
import numpy as np
from numpy.core.records import fromarrays
from math import pi, cos
# Conversion factors between degrees and sexigesimal units.
DEG_PER_HR = 360. / 24.           # degrees per hour of RA
DEG_PER_MIN = DEG_PER_HR / 60.    # degrees per minute of RA
DEG_PER_S = DEG_PER_MIN / 60.     # degrees per second of RA
DEG_PER_AMIN = 1./60.             # degrees per arcminute
DEG_PER_ASEC = DEG_PER_AMIN / 60. # degrees per arcsecond
RAD_PER_DEG = pi / 180.
DEG_PER_RAD = 180. / pi
def _radec_to_xyz(ra_deg, dec_deg):
    """ Convert RA and Dec to xyz positions on a unit sphere.

    Parameters
    ----------
    ra_deg, dec_deg : float or arrays of floats, shape (N,)
         RA and Dec in degrees.

    Returns an array of floats with shape (N, 3).
    """
    ra_rad = np.asarray(ra_deg) * RAD_PER_DEG
    dec_rad = np.asarray(dec_deg) * RAD_PER_DEG
    cos_dec = np.cos(dec_rad)
    coords = np.array([np.cos(ra_rad) * cos_dec,
                       np.sin(ra_rad) * cos_dec,
                       np.sin(dec_rad)]).T
    return np.atleast_2d(coords)
def _distsq(ra1, dec1, ra2, dec2):
    """ Find the distance squared in xyz space between two RAs and
    Decs.

    Parameters
    ----------
    ra1, dec1 : floats or arrays of floats, shape (N,)
    ra2, dec2 : floats or arrays of floats, shape (M,)

    Returns
    -------
    distance_squared: array of floats shape (N, M)
      If N or M is 1, that dimension is suppressed.
    """
    xyz1 = _radec_to_xyz(ra1, dec1)
    xyz2 = _radec_to_xyz(ra2, dec2)
    # Broadcast to an (N, M, 3) difference array instead of looping over
    # the N rows in Python -- identical results, but the O(N*M) work now
    # runs in compiled code.
    d2 = ((xyz1[:, None, :] - xyz2[None, :, :]) ** 2).sum(axis=-1)
    d2 = d2.squeeze()
    if len(d2.shape) == 0:
        # Both inputs were scalar; return a plain float as before.
        d2 = float(d2)
    return d2
def _radians_to_distsq(radians):
""" Convert to a squared xyz separation from an angle.
The input is the angle in radians. The conversion is done on a
unit sphere using the cosine rule.
"""
return 2 * (1 - np.cos(radians))
def _distsq_to_radians(distsq):
""" Convert to an angle from a squared xyz separation.
The output angle is in radians. The conversion is done on a unit
sphere using the cosine rule.
"""
return np.arccos(1 - 0.5 * distsq)
def _check_ra_dec(ra, dec):
""" Check 0 <= RA < 360 and -90 <= Dec <= 90.
Raises a ValueError outside these limits.
Parameters
----------
ra, dec : floats or arrays of floats
RA and Dec in degrees.
"""
ra = np.atleast_1d(ra)
dec = np.atleast_1d(dec)
msg = []
if (ra < 0).any():
msg.append('RA must be >= 0, %f' % ra[ra < 0][0])
if (ra >= 360).any():
msg.append('RA must be < 360, %f' % ra[ra >= 360][0])
if (dec < -90).any():
msg.append('Dec must be >= -90, %f' % dec[dec < -90][0])
if (dec > 90).any():
msg.append('Dec must be <= 90, %f' % dec[dec > 90][0])
if msg:
raise ValueError('\n'.join(msg))
def ang_sep(ra1, dec1, ra2, dec2):
    """ Returns the angular separation in degrees on the celestial
    sphere between two RA/Dec coordinates.

    Parameters
    ----------
    ra1, dec1 : floats or arrays of floats, shape (N,)
      First set of coordinates in degrees.
    ra2, dec2 : floats or arrays of floats, shape (M,)
      Second set of coordinates in degrees.

    Returns
    -------
    separation_in_degrees : array of floats, shape (N, M)
      If N or M is 1, that dimension is suppressed.
    """
    for ra, dec in ((ra1, dec1), (ra2, dec2)):
        _check_ra_dec(ra, dec)
    chord_sq = _distsq(ra1, dec1, ra2, dec2)
    return DEG_PER_RAD * _distsq_to_radians(chord_sq)
def ra_dec2s(ra, raformat='%02.0f %02.0f %06.3f'):
    """ Converts a decimal RA in degrees to a sexigesimal string.

    The output string is produced with `raformat` applied to the
    (hours, minutes, seconds) triple.
    """
    ra = float(ra)
    if not (0.0 <= ra < 360.):
        raise ValueError("RA outside sensible limits: %s" % ra)
    hours, remainder = divmod(ra, DEG_PER_HR)
    minutes, remainder = divmod(remainder, DEG_PER_MIN)
    seconds = remainder / DEG_PER_S
    return raformat % (hours, minutes, seconds)
def dec_dec2s(dec, decformat='%02.0f %02.0f %05.2f'):
    """ Converts a decimal Dec in degrees to a sexigesimal string.

    The result always carries an explicit leading '+' or '-' sign.
    """
    dec = float(dec)
    is_negative = dec < 0.
    if is_negative:
        dec = -dec
    # error checking (on the absolute value)
    if dec > 90.:
        raise ValueError("Dec outside sensible limits: %s" % dec)
    degrees, remainder = divmod(dec, 1)
    minutes, remainder = divmod(remainder, DEG_PER_AMIN)
    seconds = remainder / DEG_PER_ASEC
    sign = '-' if is_negative else '+'
    return sign + decformat % (degrees, minutes, seconds)
def dec2s(ra, dec):
    """ Convert an RA and Dec from degrees to sexigesimal.

    Parameters
    ----------
    ra, dec: floats or arrays of floats, shape (N,)
      The RA and Dec in degrees.

    Returns
    -------
    ra, dec: str or arrays of str, shape (N,)
      The RA and Dec in 'hour:min:s' 'deg:min:s' format.
    """
    try:
        # Scalar inputs convert directly.
        return ra_dec2s(ra), dec_dec2s(dec)
    except TypeError:
        pass
    # Sequence inputs: convert element-wise, then regroup into
    # (all RA strings, all Dec strings).
    ra_strings = []
    dec_strings = []
    for r, d in zip(ra, dec):
        ra_strings.append(ra_dec2s(r))
        dec_strings.append(dec_dec2s(d))
    return tuple(ra_strings), tuple(dec_strings)
def ra_s2dec(ra):
    """ Converts a sexigesimal RA string to decimal degrees.

    Parameters
    ----------
    ra : string or sequence of three strings
      The input hour, minute and second. If a string, separators
      between hours minutes and seconds can be whitespace, colons or
      h, m. s.
    """
    if isinstance(ra, basestring):
        ra = re.sub('[:hms]', ' ', ra)
        ra = ra.split()
    hours, minutes, seconds = (float(part) for part in ra)
    if not (0. <= hours <= 24. and 0. <= minutes <= 60.
            and 0. <= seconds <= 60.):
        raise ValueError('RA is outside sensible limits. RA = %s' % ra)
    return DEG_PER_HR * hours + DEG_PER_MIN * minutes + DEG_PER_S * seconds
def dec_s2dec(dec):
    """ Converts a sexigesimal Dec string to decimal degrees.

    The separators between deg/arcmin/arcsec can be whitespace or
    colons or d m s.
    """
    if isinstance(dec, basestring):
        dec = re.sub('[:dms]', ' ', dec)
        dec = dec.split()
    # A leading '-' on the degrees field marks a southern declination.
    negdec = dec[0].lstrip()[0] == '-'
    degrees, minutes, seconds = (float(part) for part in dec)
    if negdec:
        # Work with the magnitude; the sign is restored at the end.
        degrees = -degrees
    # Error checking
    if degrees > 90. or minutes >= 60. or seconds > 60:
        raise ValueError('Dec is outside sensible limits: Dec = %s' % dec)
    result = degrees + DEG_PER_AMIN * minutes + DEG_PER_ASEC * seconds
    return -result if negdec else result
def s2dec(ra, dec):
    """ Convert sexigesimal ra and dec strings (or list of ras and decs) to
    decimal degrees.

    Parameters
    ----------
    ra, dec: str or arrays of str, shape (N,)
      The RA and Dec in 'hour:min:s' 'deg:min:s' format. Separators
      may be whitespace, colons, 'h', 'm', 's' or 'd'.

    Returns
    -------
    ra, dec: floats or arrays of floats, shape (N,)
      The RA and Dec in degrees.

    Examples
    --------
    >>> s2dec('02h59m00.56s', '-80d10m04.3s')
    (44.75233333333333, -80.16786111111112)
    >>> sras = ['10:12:01.25', '10:14:06.13']
    >>> sdecs =['01:01:45.65', '01:13:47.02']
    >>> ra, dec = s2dec(sras, sdecs)
    >>> list(zip(ra, dec))
    [(153.00520833333334, 1.0293472222222222),
    (153.52554166666667, 1.229727777777778)]
    """
    if isinstance(ra, basestring):
        return ra_s2dec(ra), dec_s2dec(dec)
    # Sequence inputs: convert pairwise, then split into two arrays.
    ras = []
    decs = []
    for r, d in zip(ra, dec):
        ras.append(ra_s2dec(r))
        decs.append(dec_s2dec(d))
    return np.array(ras), np.array(decs)
def match(ra1, dec1, ra2, dec2, tol, allmatches=False):
    """ Given two sets of numpy arrays of ra,dec and a tolerance tol,
    returns an array of indices and separations with the same length
    as the first input array.

    If an index is > 0, it is the index of the closest matching second
    array element within tol arcsec.  If it's -1, then there was no
    matching ra/dec within tol arcsec.

    If allmatches = True, then for each object in the first array,
    return the index and separation of everything in the second array
    within the search tolerance, not just the closest match.

    See Also
    --------
    indmatch, unique_radec

    Notes
    -----
    To get the indices of objects in ra2, dec2 without a match, use

    >>> imatch = match(ra1, dec1, ra2, dec2, 2.)
    >>> inomatch = numpy.setdiff1d(np.arange(len(ra2)), set(imatch))
    """
    ra1, ra2, dec1, dec2 = map(np.asarray, (ra1, ra2, dec1, dec2))

    isorted = ra2.argsort()
    sdec2 = dec2[isorted]
    sra2 = ra2[isorted]

    LIM = tol * DEG_PER_ASEC

    matches = []
    # Widen the RA search window by the average declination of the two
    # catalogues, since an RA interval shrinks by cos(dec) on the sky.
    # BUGFIX: this was np.mean(sdec2.mean() + dec1.mean()) -- np.mean of
    # a scalar -- i.e. the SUM of the two mean decs, which could make
    # cos(decav) too small or even negative (inverting the window).
    decav = 0.5 * (sdec2.mean() + dec1.mean())
    RA_LIM = LIM / cos(decav * RAD_PER_DEG)

    for ra, dec in zip(ra1, dec1):
        # Candidates inside the RA window, found by bisection on the
        # RA-sorted second catalogue.
        i1 = sra2.searchsorted(ra - RA_LIM)
        i2 = i1 + sra2[i1:].searchsorted(ra + RA_LIM)
        close = []
        for j in range(i1, i2):
            if np.abs(dec - sdec2[j]) > LIM:
                continue
            # Candidate passed the cheap box cut; compute the true
            # angular separation (degrees).
            sep = ang_sep(ra, dec, sra2[j], sdec2[j])
            close.append((sep, j))
        close.sort()
        if not allmatches:
            # Choose the object with the closest separation inside the
            # requested tolerance, if one was found.
            if close:
                min_dist, jmin = close[0]
                if min_dist < LIM:
                    matches.append((isorted[jmin], min_dist))
                    continue
            # otherwise no match
            matches.append((-1, -1))
        else:
            # append all the matching objects
            jclose = []
            seps = []
            for dist, j in close:
                if dist < LIM:
                    jclose.append(j)
                    seps.append(dist)
                else:
                    break
            # BUGFIX: the 'sep' dtype entry was str(('sep'), str('f8'))
            # -- a two-argument str() call that raises TypeError at
            # runtime -- instead of the tuple (str('sep'), str('f8')).
            matches.append(fromarrays([isorted[jclose], seps],
                                      dtype=[(str('ind'), str('i8')),
                                             (str('sep'), str('f8'))]))

    if not allmatches:
        # return both indices and separations in a recarray
        temp = np.rec.fromrecords(matches, names=str('ind,sep'))
        # change to arcseconds
        temp.sep *= 3600.
        temp.sep[temp.sep < 0] = -1.
        return temp
    return matches
def indmatch(ra1, dec1, ra2, dec2, tol):
    """ Finds objects in ra1, dec1 that have a matching object in
    ra2, dec2 within tol arcsec.

    Parameters
    ----------
    ra1, dec1 : arrays of floats, shape (N,)
      First list of coordinates in degrees.
    ra2, dec2 : arrays of floats, shape (M,)
      Second list of coordinates in degrees.

    Returns
    -------
    i1, i2 : arrays of int, shape (P,)
      `i1` are the indices into ra1,dec1 that have matches in the ra2,
      dec2. `i2` are the indices into ra2,dec2 giving the matching objects.

    See Also
    --------
    match, unique_radec
    """
    result = match(ra1, dec1, ra2, dec2, tol)
    # An index of -1 in the match record marks "no match within tol".
    matched = result.ind > -1
    return matched.nonzero()[0], result.ind[matched]
def unique_radec(ra, dec, tol):
    """ Find unique ras and decs in a list of coordinates.

    RA and Dec must be arrays of the same length, and in degrees.

    tol is the tolerance for matching in arcsec. Any coord separated by
    less that this amount are assumed to be the same.

    Returns
    -------
    ind1 : ndarray of ints, shape (N,)
      Indices of the first occurence of a unique coordinate in the
      input array.
    ind2 : list of int arrays, length N
      Indices of all coords that were matched to a given unique coordinate.

    See Also
    --------
    indmatch, match

    The matching algorithm is confusing, but hopefully correct and not too
    slow. Potential for improvement...
    """
    # Match the catalogue against itself, keeping all matches per coord.
    matches = match(ra, dec, ra, dec, tol, allmatches=True)
    imatchflat = []
    for m in matches:
        imatchflat.extend(m.ind)
    # Sanity check: every coordinate must at least match itself.
    inomatch = np.setdiff1d(np.arange(len(ra)), list(set(imatchflat)))
    assert len(inomatch) == 0
    # Indices giving unique ra, decs
    iunique = []
    # Will be same length as iunique. Gives all indices in original
    # coords that are matched to each unique coord.
    iextras = []
    assigned = set()
    # BUGFIX: removed a stray debug ``print(j)`` that fired every 1000
    # iterations (leftover progress tracing in library code).
    for m in matches:
        # The lowest index in each match group identifies the group.
        isort = sorted(m.ind)
        ilow = isort[0]
        if ilow not in assigned:
            iunique.append(ilow)
            assigned.add(ilow)
            iextras.append([ilow])
            # assign any extra indices to this unique coord.
            for i in isort[1:]:
                # check not already been assigned to another coord
                if i not in assigned:
                    iextras[-1].append(i)
                    assigned.add(i)
    return np.array(iunique), iextras
| {
"repo_name": "nhmc/LAE",
"path": "python_modules/barak/coord.py",
"copies": "1",
"size": "13180",
"license": "mit",
"hash": 8158020738421736000,
"line_mean": 28.5515695067,
"line_max": 76,
"alpha_frac": 0.5662367223,
"autogenerated": false,
"ratio": 3.3082329317269075,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.9354530701916234,
"avg_score": 0.0039877904221348424,
"num_lines": 446
} |
"""Astronomical observations calculations.
"""
import numpy as np
from zcode.constants import PC, SPLC
# VEGA/Johnson/Bessell: http://web.ipac.caltech.edu/staff/fmasci/home/astro_refs/magsystems.pdf
# SDSS/AB/Fukugita: http://www.astronomy.ohio-state.edu/~martini/usefuldata.html
# These wavelengths are in [cm]
# Effective band locations ("l" = wavelength in [cm], per the note above).
BAND_EFF_LOC = {
    # Vega/Johnson/Bessell
    "U": {"l": 366e-7},
    "B": {"l": 438e-7},
    "V": {"l": 545e-7},
    "R": {"l": 641e-7},
    "I": {"l": 798e-7},
    # SDSS AB Magnitudes
    "u": {"l": 356e-7},
    "g": {"l": 483e-7},
    "r": {"l": 626e-7},
    "i": {"l": 767e-7},
    "z": {"l": 910e-7}
}

# Zero-magnitude reference fluxes.  Entries are scaled by ``UNITS``:
# "f" values are in 1e-20 erg/s/Hz/cm^2, "l" values in
# 1e-11 erg/s/Angstrom/cm^2 (the AB bands all share the constant
# 3.631e-20 erg/s/Hz/cm^2 = 3631 Jy zero point).
BAND_REF_FLUX = {
    # Vega/Johnson/Bessell
    "U": {"f": 1.790, "l": 417.5},
    "B": {"f": 4.063, "l": 632.0},
    "V": {"f": 2.636, "l": 363.1},
    "R": {"f": 3.064, "l": 217.7},
    "I": {"f": 2.416, "l": 112.6},
    # SDSS AB Magnitudes
    "u": {"f": 3.631, "l": 859.5},
    "g": {"f": 3.631, "l": 466.9},
    "r": {"f": 3.631, "l": 278.0},
    "i": {"f": 3.631, "l": 185.2},
    "z": {"f": 3.631, "l": 131.5}
}

# Magnitude zero-point offsets per band (zero for the AB system by
# construction).  NOTE(review): not referenced by the functions below —
# only mentioned in commented-out code.
BAND_ZERO_POINT = {
    # Vega/Johnson/Bessell
    "U": {"f": +0.770, "l": -0.152},
    "B": {"f": -0.120, "l": -0.602},
    "V": {"f": +0.000, "l": +0.000},
    "R": {"f": +0.186, "l": +0.555},
    "I": {"f": +0.444, "l": +1.271},
    # SDSS AB Magnitudes
    "u": {"f": 0.0, "l": 0.0},
    "g": {"f": 0.0, "l": 0.0},
    "r": {"f": 0.0, "l": 0.0},
    "i": {"f": 0.0, "l": 0.0},
    "z": {"f": 0.0, "l": 0.0}
}

# Scale factors applied to the ``BAND_REF_FLUX`` entries, keyed by
# flux type ('f' = per-frequency, 'l' = per-wavelength).
UNITS = {
    "f": 1.0e-20,   # erg/s/Hz/cm^2
    "l": 1.0e-11    # erg/s/Angstrom/cm^2
}

__all__ = ["ABmag_to_flux", "abs_mag_to_lum", "flux_to_mag", "lum_to_abs_mag", "mag_to_flux",
           "fnu_to_flambda", "flambda_to_fnu"]
# _band_name = ['u', 'b', 'v', 'r', 'i']
# _band_wlen = [365, 445, 551, 658, 806] # nm
# _band_color = ['violet', 'blue', 'green', 'red', 'darkred']
# Band = namedtuple('band', ['name', 'freq', 'wlen', 'color'])
#
# BANDS = {nn: Band(nn, SPLC/(ll*1e-7), ll*1e-7, cc)
# for nn, ll, cc in zip(_band_name, _band_wlen, _band_color)}
def _get_units_type(type):
    """Return the unit scale factor for a flux-type key.

    Parameters
    ----------
    type : str
        Key into ``UNITS``: 'f' (per-frequency) or 'l' (per-wavelength).

    Returns
    -------
    units : float
        cgs scale factor for the given type.
    type : str
        The validated key, returned unchanged.

    Raises
    ------
    ValueError
        If `type` is not a key of ``UNITS``.
    """
    try:
        units = UNITS[type]
    except KeyError as err:
        # Catch only the failed lookup (not arbitrary errors) and chain
        # the original exception for easier debugging.
        raise ValueError("Unrecognized `type` = '{}'".format(type)) from err
    return units, type
def ABmag_to_flux(mag):
    """Convert an AB magnitude to a spectral-flux density.

    See: http://web.ipac.caltech.edu/staff/fmasci/home/astro_refs/magsystems.pdf

    Returns
    -------
    fnu : () scalar
        Spectral-flux density in units of [erg/s/cm^2/Hz]
    """
    # AB definition: mag = -2.5 log10(fnu) - 48.6
    exponent = (mag + 48.6) / -2.5
    return np.power(10.0, exponent)
def mag_to_flux(band, mag, type='f'):
    """Convert from broad-band filter magnitude (e.g. Johnson) to flux.

    Parameters
    ----------
    band : str
        Photometric band, a key of ``BAND_REF_FLUX`` (e.g. 'V', 'g').
    mag : () scalar
        Apparent magnitude in the given band.
    type : str
        'f' for per-frequency flux, 'l' for per-wavelength flux.

    Returns
    -------
    flux : () scalar
        Flux in either [erg/s/cm^2/Hz] or [erg/s/cm^2/Angstrom] depending on `type`.
    """
    mag = np.asarray(mag)
    units, type = _get_units_type(type)
    if band not in BAND_REF_FLUX:
        raise ValueError("Unrecognized `band` = '{}'".format(band))
    # Scale the band's zero-magnitude reference flux by the magnitude.
    ref_flux = BAND_REF_FLUX[band][type] * units
    return ref_flux * np.power(10.0, mag/-2.5)
def flux_to_mag(band, flux, type='f'):
    """Convert a flux to a broad-band filter magnitude (e.g. Johnson).

    (Previously the docstring was copy-pasted from ``mag_to_flux`` and
    described the inverse conversion.)

    Arguments
    ---------
    band : str
        Photometric band, a key of ``BAND_REF_FLUX`` (e.g. 'V', 'g').
    flux : () scalar
        Flux in either [erg/s/cm^2/Hz] or [erg/s/cm^2/Angstrom] depending on `type`.
    type : str
        'f' for per-frequency flux, 'l' for per-wavelength flux.

    Returns
    -------
    mag : () scalar
        Magnitude relative to the band's zero-magnitude reference flux.
    """
    flux = np.asarray(flux)
    units, type = _get_units_type(type)
    if band not in BAND_REF_FLUX:
        raise ValueError("Unrecognized `band` = '{}'".format(band))
    ref_flux = BAND_REF_FLUX[band][type] * units
    mag = -2.5 * np.log10(flux/ref_flux)
    return mag
def abs_mag_to_lum(band, mag, type='f'):
    """Convert an absolute magnitude in a band to a luminosity.

    Parameters
    ----------
    band : str
        Photometric band, a key of ``BAND_REF_FLUX``.
    mag : () scalar
        Absolute magnitude.
    type : str
        'f'/'flux' for per-frequency, 'l'/'lambda' or 'w'/'wavelength'
        for per-wavelength units.

    Returns
    -------
    lum : () scalar
        Luminosity (cgs; assumes ``PC`` is the parsec in cm — see
        ``zcode.constants``).
    """
    mag = np.asarray(mag)
    kind = type.lower()
    if kind.startswith('f'):
        type = 'f'
        units = 1.0e-20  # erg/s/Hz/cm^2
    elif kind.startswith('l') or kind.startswith('w'):
        type = 'l'
        units = 1.0e-11  # erg/s/Angstrom/cm^2
    else:
        raise ValueError("Unrecognized `type` = '{}'".format(type))
    if band not in BAND_REF_FLUX:
        raise ValueError("Unrecognized `band` = '{}'".format(band))
    ref_flux = BAND_REF_FLUX[band][type]
    # L = 4 pi (10 pc)^2 * f_ref * 10^(-mag/2.5); the 10^2 folds in (10)^2.
    lum = 4.0 * np.pi * ref_flux * units * PC**2 * np.power(10.0, 2-mag/2.5)
    return lum
def lum_to_abs_mag(band, lum, type='f'):
    """Convert a luminosity to an absolute magnitude in the given band.

    Inverse of ``abs_mag_to_lum``.

    BUGFIX: the previous version added ``+ 5`` after the log term even
    though ``ref_lum`` already includes the 10 pc distance via
    ``(10*PC)**2``, so the round trip ``lum_to_abs_mag(abs_mag_to_lum(m))``
    returned ``m + 5``.  With ``ref_lum`` defined at 10 pc the magnitude
    is simply ``-2.5 log10(lum / ref_lum)``.

    Parameters
    ----------
    band : str
        Photometric band, a key of ``BAND_REF_FLUX``.
    lum : () scalar
        Luminosity (cgs; assumes ``PC`` is the parsec in cm).
    type : str
        'f'/'flux' for per-frequency, 'l'/'lambda' or 'w'/'wavelength'
        for per-wavelength units.

    Returns
    -------
    mag : () scalar
        Absolute magnitude.
    """
    lum = np.asarray(lum)
    if type.lower().startswith('f'):
        type = 'f'
        units = 1.0e-20  # erg/s/Hz/cm^2
    elif (type.lower().startswith('l') or type.lower().startswith('w')):
        type = 'l'
        units = 1.0e-11  # erg/s/Angstrom/cm^2
    else:
        raise ValueError("Unrecognized `type` = '{}'".format(type))
    if band not in BAND_REF_FLUX:
        raise ValueError("Unrecognized `band` = '{}'".format(band))
    # Luminosity of a zero-magnitude source at the standard 10 pc.
    ref_lum = BAND_REF_FLUX[band][type] * 4.0 * np.pi * units * (10*PC)**2
    mag = -2.5 * np.log10(lum/ref_lum)
    return mag
def fnu_to_flambda(fnu, freq=None, wavelength=None):
    """Convert a per-frequency flux density to per-wavelength.

    One of `freq` or `wavelength` must be given (related by
    ``freq = SPLC / wavelength``); `freq` wins when both are supplied.
    Uses f_lambda = f_nu * nu^2 / c.
    """
    if freq is None:
        freq = SPLC / wavelength
    return fnu * freq**2 / SPLC
def flambda_to_fnu(flambda, freq=None, wavelength=None):
    """Convert a per-wavelength flux density to per-frequency.

    Inverse of ``fnu_to_flambda``: f_nu = f_lambda * c / nu^2
    (equivalently f_lambda * lambda^2 / c).

    One of `freq` or `wavelength` must be given (related by
    ``freq = SPLC / wavelength``); `freq` wins when both are supplied.

    BUGFIX: the previous version (a) derived `wavelength` from `freq`
    but then used `freq` directly — crashing with a ``TypeError`` when
    only `wavelength` was supplied — and (b) applied the *forward*
    transform ``flambda * freq**2 / SPLC`` instead of its inverse.
    """
    if freq is None:
        freq = SPLC / wavelength
    fnu = flambda * SPLC / freq**2
    return fnu
| {
"repo_name": "lzkelley/zcode",
"path": "zcode/astro/obs.py",
"copies": "1",
"size": "5441",
"license": "mit",
"hash": 7870281733342599000,
"line_mean": 26.3417085427,
"line_max": 95,
"alpha_frac": 0.5353795258,
"autogenerated": false,
"ratio": 2.5096863468634685,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.8543325276323559,
"avg_score": 0.00034811926798193133,
"num_lines": 199
} |
""" Astronomy related functions """
from functools import wraps
import numpy as np
try:
import dask
except ImportError:
dask = None
def dask_compatibility(fn):
    """Make functions transparent to using dask delayed objects.

    Wraps `fn` so that, if the direct call raises and dask is
    installed, the call is retried through ``dask.delayed`` and
    immediately computed.  Without dask, the original error is raised.
    """
    @wraps(fn)
    def _wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except Exception as problem:
            if dask is None:
                # No dask available: nothing to fall back to.
                raise problem
            delayed_call = dask.delayed(fn)(*args, **kwargs)
            return delayed_call.compute()
    return _wrapper
def nside2npix(nside):
    """Give the number of pixels for the given nside.

    Parameters
    ----------
    nside : int
        healpix nside parameter; an exception is raised if nside is not valid
        (nside must be a power of 2, less than 2**30)

    Returns
    -------
    npix : int
        corresponding number of pixels

    Notes
    -----
    Raise a ValueError exception if nside is not valid.  (The docstring
    always promised this, but no validation was previously performed.)

    Examples
    --------
    >>> import numpy as np
    >>> nside2npix(8)
    768
    """
    # Valid nside: a positive power of two below 2**30 (healpix convention).
    valid = (nside == int(nside) and 0 < nside < 2**30
             and int(nside) & (int(nside) - 1) == 0)
    if not valid:
        raise ValueError(
            "nside must be a power of 2, less than 2**30 (got %r)" % (nside,))
    return 12 * nside * nside
def gaia_healpix_expression(healpix_expression="source_id/34359738368",
                            healpix_max_level=12, healpix_level=8):
    """
    Give the healpix expression from the Gaia source_id at
    a given healpix level

    Parameters
    ----------
    healpix_expression: str
        field name and conversion to healpix cell
    healpix_max_level: int
        expression corresponding level
    healpix_level: int
        desired healpix level from the data

    Returns
    -------
    expression: str
        final expression
    """
    # Each level drop merges 4 child cells into one parent cell.
    level_drop = healpix_max_level - healpix_level
    divisor = 4 ** level_drop
    return "%s/%s" % (healpix_expression, divisor)
@dask_compatibility
def get_healpix_grid(data, healpix_level):
    """Convert a dataframe to the dense healpix grid.

    Parameters
    ----------
    data: pd.DataFrame
        data from a database query; expects columns ``hpx`` (healpix
        cell index) and ``n`` (the value per cell)
    healpix_level: int
        level of the query

    Returns
    -------
    grid: np.array (npix, )
        dense grid of npix(healpix_level) with the data values
        (cells absent from `data` stay zero)
    """
    npix = nside2npix(2 ** healpix_level)
    dense = np.zeros(npix, dtype=data.n.values.dtype)
    dense[data.hpx] = data.n
    return dense
def healpix_grid_plot(fgrid, what_label=None, cmap="afmhot",
                      grid_limits=None, healpix_input="equatorial",
                      healpix_output="galactic", image_size=800, nest=True,
                      norm=None, title="", smooth=None,
                      colorbar=True, rotation=(0, 0, 0), **kwargs):
    """ Plot data from healpix configuration

    fgrid: ndarray
        dense healpix grid of values to plot
    what_label: str
        colorbar label
    cmap: str or cmap instance
        colormap used by matplotlib
    healpix_input: str
        Specificy if the healpix index is in
        "equatorial", "galactic" or "ecliptic".
    healpix_output: str
        Plot in "equatorial", "galactic" or "ecliptic".
    grid_limits: tuple, optional
        [minvalue, maxvalue] value that map to the colormap
        (values below and above these are clipped to the the min/max).
    image_size: int
        size for the image that healpy uses for rendering
    nest: boolean
        If the healpix data is in nested (True) or ring (False)
    title: str
        Title of figure
    smooth: float
        apply gaussian smoothing, in degrees
    rotation: tuple(3)
        Rotate the plot, in format (lon, lat, psi)
        such that (lon, lat) is the center,
        and rotate on the screen by angle psi. All angles are degrees.
    norm : {'hist', 'log', None}
        Color normalization, hist= histogram equalized color mapping,
        log= logarithmic color mapping, default: None (linear color mapping)
    """
    import healpy as hp
    from matplotlib import colors
    import warnings
    # Compatibility filter: accept matplotlib-style vmin/vmax kwargs.
    vmin = kwargs.pop('vmin', None)
    vmax = kwargs.pop('vmax', None)
    if grid_limits is None:
        grid_limits = [vmin, vmax]
    # healpy expects norm='log'; map a matplotlib LogNorm onto it and
    # force a strictly-positive lower limit.
    if isinstance(norm, colors.LogNorm):
        norm = 'log'
        if grid_limits[0] is None:
            grid_limits[0] = 1
    if smooth:
        if nest:
            # hp.smoothing works on RING ordering: convert first.
            # BUGFIX: previously the reordered array was assigned to
            # `grid` and then discarded — smoothing was applied to the
            # original NEST-ordered `fgrid`.
            fgrid = hp.reorder(fgrid, inp="NEST", out="RING")
            nest = False
        # grid[np.isnan(grid)] = np.nanmean(grid)
        grid = hp.smoothing(fgrid, sigma=np.radians(smooth))
    else:
        grid = fgrid
    if grid_limits:
        grid_min, grid_max = grid_limits
    else:
        grid_min = grid_max = None
    func = hp.mollview
    # Translate frame names into healpy's single-letter coord codes; a
    # (from, to) pair makes healpy rotate, None means no rotation needed.
    coord_map = dict(equatorial='C', galactic='G', ecliptic="E")
    coord = coord_map[healpix_input], coord_map[healpix_output]
    if coord_map[healpix_input] == coord_map[healpix_output]:
        coord = None
    with warnings.catch_warnings():
        warnings.filterwarnings("ignore")
        return func(grid, unit=what_label, rot=rotation, nest=nest,
                    title=title, coord=coord, cmap=cmap, hold=True,
                    xsize=image_size, min=grid_min, norm=norm,
                    max=grid_max, cbar=colorbar, **kwargs)
@dask_compatibility
def add_column_healpix(self, name="healpix", longitude="ra", latitude="dec",
                       degrees=True, healpix_order=12, nest=True):
    """Add a healpix (in memory) column based on a longitude and latitude.

    Parameters
    ----------
    name: str
        name of the column
    longitude: str
        expression of the longitude (or right-ascension) coordinate
        (astronomical convention: latitude=90 is the north pole)
    latitude: str
        expression of the latitude (or declination) coordinate
    degrees: boolean
        If lon/lat are in degrees (default) or radians.
    healpix_order: int
        healpix order, >= 0
    nest: boolean
        Nested healpix (default) or ring.
    """
    import healpy as hp
    # healpy's ang2pix wants co-latitude (theta) and azimuth (phi) in radians.
    scale = np.pi / 180. if degrees else 1.
    phi = self[longitude] * scale
    theta = np.pi / 2 - self[latitude] * scale
    nside = hp.order2nside(healpix_order)
    hp_index = hp.ang2pix(nside, theta, phi, nest=nest)
    # Table-like containers expose add_column; fall back to item assignment.
    try:
        self.add_column(name, hp_index)
    except AttributeError:
        self[name] = hp_index
    return self
@dask_compatibility
def project_aitoff(alphain, deltain, radians=True):
    """Aitoff projection (https://en.wikipedia.org/wiki/Aitoff_projection).

    TODO: optimize for DASK DataFrame

    Parameters
    ----------
    alphain: array
        azimuth angle
    deltain: array
        polar angle
    radians: boolean
        input and output in radians (True), or degrees (False)

    Returns
    -------
    x: ndarray
        x coordinate
    y: ndarray
        y coordinate
    """
    try:
        to_rad = 1. if radians else np.pi / 180.
        # Wrap the azimuth into [-half_turn, half_turn].
        half_turn = np.pi if radians else 180
        wrapped = np.copy(alphain)
        over = alphain > half_turn
        wrapped[over] = alphain[over] - 2. * half_turn
        polar = deltain
        ait = np.arccos(np.cos(polar * to_rad) *
                        np.cos(0.5 * wrapped * to_rad))
        sinc_term = np.sinc(ait / np.pi)
        x = (2 * np.cos(polar * to_rad) * np.sin(0.5 * wrapped * to_rad) /
             sinc_term / np.pi)
        y = np.sin(polar * to_rad) / sinc_term / np.pi
        return x, y
    except ValueError as issue:
        # dask dataframes are not playing nice with the fancy indexing
        # above; retry through dask.delayed when available.
        try:
            import dask
            return dask.delayed(project_aitoff)(alphain, deltain,
                                                radians).compute()
        except ImportError:
            raise issue
def add_aitoff_projections(self, alpha, delta, x, y, radians=False):
    """Add aitoff (https://en.wikipedia.org/wiki/Aitoff_projection) projection.

    Parameters
    ----------
    alpha: array
        azimuth angle
    delta: array
        polar angle
    radians: boolean
        input and output in radians (True), or degrees (False)
    x: str
        output column name for the x coordinate
    y: str
        output column name for the y coordinate

    Returns
    -------
    self
        the container, with the two projected columns added
    """
    x_values, y_values = project_aitoff(self[alpha], self[delta],
                                        radians=radians)
    # Table-like containers expose add_column; fall back to item assignment.
    for column, values in ((x, x_values), (y, y_values)):
        try:
            self.add_column(column, values)
        except AttributeError:
            self[column] = values
    return self
def find_matching_parenthesis(string):
    """ Find recursively groups of balanced parenthesis.

    Returns ``None`` when `string` contains no complete group;
    otherwise the list of top-level group contents followed by the
    recursive result for each group (``None`` for paren-free groups).
    """
    depth = 0
    start = None
    groups = []
    for pos, ch in enumerate(string):
        if ch == '(':
            if depth == 0:
                # content to extract starts just after this parenthesis
                start = pos + 1
            depth += 1
        elif ch == ')':
            depth -= 1
            if depth == 0:
                groups.append(string[start:pos])
    if not groups:
        return None
    # Append the recursive decomposition of every extracted group.
    return groups + [find_matching_parenthesis(grp) for grp in groups]
def flatten(lst):
    """ Flatten a nested list or nested sequence of values.

    Lists and tuples are expanded recursively; ``None`` entries are
    dropped; every other value is kept in order.
    """
    flat = []
    for item in lst:
        if isinstance(item, (list, tuple)):
            flat.extend(flatten(item))
        elif item is not None:
            flat.append(item)
    return flat
def healpix_plot(self, healpix_expression='healpix', healpix_level=8,
                 what='count(*)', grid=None,
                 healpix_input='equatorial', healpix_output='galactic',
                 norm=None, cmap='afmhot', grid_limits=None,
                 image_size=800, nest=True,
                 title='', smooth=None, colorbar=True,
                 rotation=(0, 0, 0), **kwargs):
    """ Plot data from healpix configuration

    healpix_expression: str
        column holding the healpix cell index
    healpix_level: int
        healpix level of the data
    what: str
        aggregation expression, e.g. 'count(*)' or 'mean(colname)'
    cmap: str or cmap instance
        colormap used by matplotlib
    grid: ndarray
        healpix grid of size nside2npix(2 ** level); computed from
        `what` when None
    healpix_input: str
        Specificy if the healpix index is in
        "equatorial", "galactic" or "ecliptic".
    healpix_output: str
        Plot in "equatorial", "galactic" or "ecliptic".
    grid_limits: tuple, optional
        [minvalue, maxvalue] value that map to the colormap
        (values below and above these are clipped to the the min/max).
    image_size: int
        size for the image that healpy uses for rendering
    nest: boolean
        If the healpix data is in nested (True) or ring (False)
    title: str
        Title of figure
    smooth: float
        apply gaussian smoothing, in degrees
    rotation: tuple(3)
        Rotate the plot, in format (lon, lat, psi)
        such that (lon, lat) is the center,
        and rotate on the screen by angle psi. All angles are degrees.
    norm : {'hist', 'log', None}
        Color normalization, hist= histogram equalized color mapping,
        log= logarithmic color mapping, default: None (linear color mapping)
    """
    from scipy.stats import binned_statistic
    if grid is None:
        # Split `what` into the aggregation function and its argument,
        # e.g. 'mean(parallax)' -> func='mean', what_='parallax'.
        # find_matching_parenthesis returns None for paren-free input,
        # making the [0] subscript raise TypeError.
        try:
            what_ = find_matching_parenthesis(what)[0]
        except TypeError:
            what_ = what
        func = what.replace(what_, '')[:-2]  # remove ()
        if what_ in ('*', ):
            # count(*): any column will do; reuse the healpix column.
            value = self[healpix_expression]
        else:
            value = self[what_]
        # Aggregate values per healpix cell into a dense grid.
        binned_statistic_ = dask_compatibility(binned_statistic)
        bins = np.arange(nside2npix(2 ** healpix_level) + 1)
        grid = binned_statistic_(self[healpix_expression],
                                 value, bins=bins, statistic=func).statistic
    return healpix_grid_plot(grid, what_label=what,
                             grid_limits=grid_limits,
                             healpix_input=healpix_input,
                             healpix_output=healpix_output,
                             image_size=image_size, nest=nest, norm=norm,
                             title=title, smooth=smooth,
                             cmap=cmap, rotation=rotation, **kwargs)
| {
"repo_name": "mfouesneau/ezdata",
"path": "ezdata/astro/astro.py",
"copies": "1",
"size": "12200",
"license": "mit",
"hash": -730383231188295400,
"line_mean": 29.3482587065,
"line_max": 79,
"alpha_frac": 0.5832786885,
"autogenerated": false,
"ratio": 3.881641743557111,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.4964920432057111,
"avg_score": null,
"num_lines": null
} |
""" Astropy coordinate class for the Magellanic Stream coordinate system """
from astropy.coordinates.matrix_utilities import (rotation_matrix,
matrix_product,
matrix_transpose)
from astropy.coordinates.baseframe import (frame_transform_graph,
BaseCoordinateFrame,
RepresentationMapping)
from astropy.coordinates.transformations import StaticMatrixTransform
from astropy.coordinates import representation as r
from astropy.coordinates import Galactic
import astropy.units as u
__all__ = ["MagellanicStreamNidever08", "MagellanicStream"]
class MagellanicStreamNidever08(BaseCoordinateFrame):
    """
    A coordinate or frame aligned with the Magellanic Stream,
    as defined by Nidever et al. (2008,
    see: `<http://adsabs.harvard.edu/abs/2008ApJ...679..432N>`_).

    For more information about this class, see the Astropy documentation
    on coordinate frames in :mod:`~astropy.coordinates`.

    Examples
    --------
    Converting the coordinates of the Large Magellanic Cloud:

        >>> from astropy import coordinates as coord
        >>> from astropy import units as u
        >>> from gala.coordinates import MagellanicStreamNidever08
        >>> c = coord.Galactic(l=280.4652*u.deg, b=-32.8884*u.deg)
        >>> ms = c.transform_to(MagellanicStreamNidever08)
        >>> print(ms)
        <MagellanicStreamNidever08 Coordinate: (L, B) in deg
            (-0.13686116, 2.42583948)>
    """
    # Expose the spherical components under the stream-aligned names
    # (L = longitude, B = latitude).
    frame_specific_representation_info = {
        r.SphericalRepresentation: [
            RepresentationMapping('lon', 'L'),
            RepresentationMapping('lat', 'B')
        ]
    }
    default_representation = r.SphericalRepresentation
    default_differential = r.SphericalCosLatDifferential

    # Frame orientation anchors in Galactic coordinates: north pole and
    # longitude zero-point of the stream frame.
    _ngp = Galactic(l=188.5*u.deg, b=-7.5*u.deg)
    _lon0 = Galactic(l=280.47*u.deg, b=-32.75*u.deg)

    # Longitudes wrap into (-180, 180] deg by default.
    _default_wrap_angle = 180*u.deg

    def __init__(self, *args, **kwargs):
        # `wrap_longitude` is consumed here and never reaches the base class.
        wrap = kwargs.pop('wrap_longitude', True)
        super().__init__(*args, **kwargs)
        if wrap and isinstance(self._data, (r.UnitSphericalRepresentation,
                                            r.SphericalRepresentation)):
            self._data.lon.wrap_angle = self._default_wrap_angle

    # TODO: remove this. This is a hack required as of astropy v3.1 in order
    # to have the longitude components wrap at the desired angle
    def represent_as(self, base, s='base', in_frame_units=False):
        r = super().represent_as(base, s=s, in_frame_units=in_frame_units)
        r.lon.wrap_angle = self._default_wrap_angle
        return r
    represent_as.__doc__ = BaseCoordinateFrame.represent_as.__doc__
@frame_transform_graph.transform(StaticMatrixTransform,
                                 Galactic, MagellanicStreamNidever08)
def gal_to_mag():
    """Static rotation matrix: Galactic -> Magellanic Stream frame."""
    # Euler-style decomposition: two rotations align the frame's north
    # pole (_ngp), then a final z-rotation sets the longitude zero-point
    # (NOTE(review): presumably derived from _lon0 — confirm).
    zero_point_rot = rotation_matrix(57.275785782128686*u.deg, 'z')
    pole_tilt_rot = rotation_matrix(90*u.deg - MagellanicStreamNidever08._ngp.b, 'y')
    pole_lon_rot = rotation_matrix(MagellanicStreamNidever08._ngp.l, 'z')
    return matrix_product(zero_point_rot, pole_tilt_rot, pole_lon_rot)
@frame_transform_graph.transform(StaticMatrixTransform,
                                 MagellanicStreamNidever08, Galactic)
def mag_to_gal():
    """Inverse transform: transpose of the orthogonal gal_to_mag matrix."""
    forward = gal_to_mag()
    return matrix_transpose(forward)
# TODO: remove this in next version
class MagellanicStream(MagellanicStreamNidever08):
    """Deprecated alias for `MagellanicStreamNidever08`."""

    def __init__(self, *args, **kwargs):
        import warnings
        message = ("This frame is deprecated. Use MagellanicStreamNidever08 "
                   "instead.")
        warnings.warn(message, DeprecationWarning)
        super().__init__(*args, **kwargs)
# Mirror the Nidever08 <-> Galactic transforms onto the deprecated
# MagellanicStream alias so that existing user code keeps transforming.
trans = frame_transform_graph.get_transform(MagellanicStreamNidever08,
                                            Galactic).transforms[0]
frame_transform_graph.add_transform(MagellanicStream, Galactic, trans)
trans = frame_transform_graph.get_transform(Galactic,
                                            MagellanicStreamNidever08).transforms[0]
frame_transform_graph.add_transform(Galactic, MagellanicStream, trans)
| {
"repo_name": "adrn/gala",
"path": "gala/coordinates/magellanic_stream.py",
"copies": "2",
"size": "4143",
"license": "mit",
"hash": 4092217780422355500,
"line_mean": 39.2233009709,
"line_max": 84,
"alpha_frac": 0.6401158581,
"autogenerated": false,
"ratio": 3.7324324324324323,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 0.5372548290532432,
"avg_score": null,
"num_lines": null
} |
""" Astropy coordinate class for the Ophiuchus coordinate system """
# Third-party
import numpy as np
import astropy.units as u
import astropy.coordinates as coord
from astropy.coordinates import frame_transform_graph
from astropy.coordinates.matrix_utilities import matrix_transpose
__all__ = ["OphiuchusPriceWhelan16", "Ophiuchus"]
class OphiuchusPriceWhelan16(coord.BaseCoordinateFrame):
    """
    A Heliocentric spherical coordinate system defined by the orbit
    of the Ophiuchus stream, as described in
    Price-Whelan et al. 2016 (see: `<https://arxiv.org/abs/1601.06790>`_).

    For more information about this class, see the Astropy documentation
    on coordinate frames in :mod:`~astropy.coordinates`.

    Parameters
    ----------
    representation : :class:`~astropy.coordinates.BaseRepresentation` or None
        A representation object or None to have no data (or use the other keywords)
    phi1 : angle_like, optional, must be keyword
        The longitude-like angle corresponding to Ophiuchus's orbit.
    phi2 : angle_like, optional, must be keyword
        The latitude-like angle corresponding to Ophiuchus's orbit.
    distance : :class:`~astropy.units.Quantity`, optional, must be keyword
        The Distance for this object along the line-of-sight.
    pm_phi1_cosphi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
        The proper motion in the longitude-like direction corresponding to
        the Ophiuchus stream's orbit.
    pm_phi2 : :class:`~astropy.units.Quantity`, optional, must be keyword
        The proper motion in the latitude-like direction perpendicular to the
        Ophiuchus stream's orbit.
    radial_velocity : :class:`~astropy.units.Quantity`, optional, must be keyword
        The radial velocity of this object along the line-of-sight.
    """
    default_representation = coord.SphericalRepresentation
    default_differential = coord.SphericalCosLatDifferential

    # Expose the spherical components under the stream-aligned names
    # (phi1 = longitude, phi2 = latitude).
    frame_specific_representation_info = {
        coord.SphericalRepresentation: [
            coord.RepresentationMapping('lon', 'phi1'),
            coord.RepresentationMapping('lat', 'phi2'),
            coord.RepresentationMapping('distance', 'distance')]
    }

    # Longitudes wrap into (-180, 180] deg by default.
    _default_wrap_angle = 180*u.deg

    def __init__(self, *args, **kwargs):
        # `wrap_longitude` is consumed here and never reaches the base class.
        wrap = kwargs.pop('wrap_longitude', True)
        super().__init__(*args, **kwargs)
        if wrap and isinstance(self._data, (coord.UnitSphericalRepresentation,
                                            coord.SphericalRepresentation)):
            self._data.lon.wrap_angle = self._default_wrap_angle

    # TODO: remove this. This is a hack required as of astropy v3.1 in order
    # to have the longitude components wrap at the desired angle
    def represent_as(self, base, s='base', in_frame_units=False):
        r = super().represent_as(base, s=s, in_frame_units=in_frame_units)
        r.lon.wrap_angle = self._default_wrap_angle
        return r
    represent_as.__doc__ = coord.BaseCoordinateFrame.represent_as.__doc__
# Rotation matrix
# NOTE(review): presumably the Galactic -> Ophiuchus rotation from
# Price-Whelan et al. 2016 — confirm the values against the paper.
R = np.array([[0.84922096554, 0.07001279040, 0.52337554476],
              [-0.27043653641, -0.79364259852, 0.54497294023],
              [0.45352820359, -0.60434231606, -0.65504391727]])
@frame_transform_graph.transform(coord.StaticMatrixTransform,
                                 coord.Galactic, OphiuchusPriceWhelan16)
def gal_to_oph():
    """ Compute the transformation from Galactic spherical to
    heliocentric Ophiuchus coordinates.
    """
    # Static transform: the precomputed module-level matrix ``R``.
    return R
@frame_transform_graph.transform(coord.StaticMatrixTransform,
                                 OphiuchusPriceWhelan16, coord.Galactic)
def oph_to_gal():
    """ Compute the transformation from heliocentric Ophiuchus coordinates
    to spherical Galactic: the transpose of the orthogonal forward matrix.
    """
    forward = gal_to_oph()
    return matrix_transpose(forward)
# TODO: remove this in next version
class Ophiuchus(OphiuchusPriceWhelan16):
    """Deprecated alias for `OphiuchusPriceWhelan16`."""

    def __init__(self, *args, **kwargs):
        import warnings
        message = ("This frame is deprecated. Use OphiuchusPriceWhelan16"
                   " instead.")
        warnings.warn(message, DeprecationWarning)
        super().__init__(*args, **kwargs)
# Register the deprecated `Ophiuchus` alias on the transform graph.
# NOTE(review): these lookups go through coord.ICRS although the static
# transforms above are defined to/from coord.Galactic (compare the
# Magellanic-stream module, which registers via Galactic).  Taking
# ``.transforms[0]`` of a composite path keeps only its first leg —
# confirm this registration is intentional.
trans = frame_transform_graph.get_transform(OphiuchusPriceWhelan16,
                                            coord.ICRS).transforms[0]
frame_transform_graph.add_transform(Ophiuchus, coord.ICRS, trans)
trans = frame_transform_graph.get_transform(coord.ICRS,
                                            OphiuchusPriceWhelan16).transforms[0]
frame_transform_graph.add_transform(coord.ICRS, Ophiuchus, trans)
| {
"repo_name": "adrn/gary",
"path": "gala/coordinates/oph.py",
"copies": "2",
"size": "4558",
"license": "mit",
"hash": 2245724480375179300,
"line_mean": 40.8165137615,
"line_max": 83,
"alpha_frac": 0.6772707328,
"autogenerated": false,
"ratio": 3.869269949066214,
"config_test": false,
"has_no_keywords": false,
"few_assignments": false,
"quality_score": 1,
"avg_score": 0.0007408091034362337,
"num_lines": 109
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.