repo_name stringlengths 7 65 | path stringlengths 5 185 | copies stringlengths 1 4 | size stringlengths 4 6 | content stringlengths 977 990k | license stringclasses 14 values | hash stringlengths 32 32 | line_mean float64 7.18 99.4 | line_max int64 31 999 | alpha_frac float64 0.25 0.95 | ratio float64 1.5 7.84 | autogenerated bool 1 class | config_or_test bool 2 classes | has_no_keywords bool 2 classes | has_few_assignments bool 1 class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
the-blue-alliance/the-blue-alliance | src/backend/common/consts/fcm/platform_priority.py | 1 | 2306 | from __future__ import annotations
import enum
from typing import Dict
from backend.common.consts.fcm.platform_type import PlatformType
@enum.unique
class PlatformPriority(enum.IntEnum):
    """
    Constants regarding the priority of a push notification.
    """

    NORMAL = 0
    HIGH = 1

    @staticmethod
    def validate(platform_priority: PlatformPriority) -> None:
        """Validate that the platform_priority is supported.

        Raises:
            ValueError: If platform_priority is an unsupported platform priority.
        """
        if platform_priority not in list(PlatformPriority):
            raise ValueError(
                "Unsupported platform_priority: {}".format(platform_priority)
            )

    @staticmethod
    def platform_priority(
        platform_type: PlatformType, platform_priority: PlatformPriority
    ) -> str:
        """Translate a (platform, priority) pair into the FCM priority string.

        Args:
            platform_type: The FCM delivery platform (ANDROID, APNS, or WEB).
            platform_priority: The priority to translate.

        Returns:
            The platform-specific priority string FCM expects.

        Raises:
            ValueError: If either argument is unsupported.
        """
        # NOTE: the previous implementation re-imported PlatformType locally
        # here; the module-level import already provides it, so the redundant
        # import has been removed.
        # Validate that platform_type is supported
        PlatformType.validate(platform_type)
        # Validate that platform_priority is supported
        PlatformPriority.validate(platform_priority)

        # https://firebase.google.com/docs/reference/fcm/rest/v1/projects.messages#androidmessagepriority
        ANDROID: Dict[PlatformPriority, str] = {
            PlatformPriority.NORMAL: "normal",
            PlatformPriority.HIGH: "high",
        }
        # https://developer.apple.com/library/archive/documentation/NetworkingInternet/Conceptual/RemoteNotificationsPG/CommunicatingwithAPNs.html#//apple_ref/doc/uid/TP40008194-CH11-SW1
        APNS: Dict[PlatformPriority, str] = {
            PlatformPriority.NORMAL: "5",
            PlatformPriority.HIGH: "10",
        }
        # Note: Do not use HIGH for iOS notifications when notifications only contain content-available
        # https://developers.google.com/web/fundamentals/push-notifications/web-push-protocol#urgency
        WEB: Dict[PlatformPriority, str] = {
            PlatformPriority.NORMAL: "normal",
            PlatformPriority.HIGH: "high",
        }

        if platform_type == PlatformType.ANDROID:
            return ANDROID[platform_priority]
        elif platform_type == PlatformType.APNS:
            return APNS[platform_priority]
        return WEB[platform_priority]
| mit | 9b4f923e54faa16311b4500878bc5296 | 35.03125 | 186 | 0.67216 | 4.47767 | false | false | false | false |
the-blue-alliance/the-blue-alliance | old_py2/controllers/admin/admin_offseason_scraper_controller.py | 8 | 3271 | import datetime
import logging
import os
from google.appengine.ext import ndb
from google.appengine.ext.webapp import template
from controllers.base_controller import LoggedInHandler
from datafeeds.datafeed_usfirst_offseason import DatafeedUsfirstOffseason
from consts.event_type import EventType
from helpers.event_manipulator import EventManipulator
from models.event import Event
class AdminOffseasonScraperController(LoggedInHandler):
    """
    View and add un-added offseasons from FIRST's site
    """

    def get(self):
        # Admin-only page.
        self._require_admin()

        # Scrape the current offseason event list from FIRST's site.
        df = DatafeedUsfirstOffseason()
        new_events = df.getEventList()

        # Offseason events for the current year that already carry a FIRST
        # eid are already known; filter them out of the scraped list.
        old_events = Event.query().filter(
            Event.event_type_enum == EventType.OFFSEASON).filter(
            Event.year == datetime.datetime.now().year).filter(
            Event.first_eid != None).fetch(100)
        old_first_eids = [event.first_eid for event in old_events]
        truly_new_events = [event for event in new_events if event.first_eid not in old_first_eids]

        self.template_values.update({
            "events": truly_new_events,
            "event_key": self.request.get("event_key"),
            "success": self.request.get("success"),
        })

        path = os.path.join(os.path.dirname(__file__), '../../templates/admin/offseasons.html')
        self.response.out.write(template.render(path, self.template_values))

    def post(self):
        self._require_admin()

        # "duplicate": attach a scraped FIRST eid to an existing event.
        if self.request.get("submit") == "duplicate":
            old_event = Event.get_by_id(self.request.get("duplicate_event_key"))
            old_event.first_eid = self.request.get("event_first_eid")
            old_event.dirty = True  # TODO: hacky
            EventManipulator.createOrUpdate(old_event)
            self.redirect("/admin/offseasons?success=duplicate&event_key=%s" % self.request.get("duplicate_event_key"))
            return

        # "create": build a brand-new offseason event from the submitted form.
        if self.request.get("submit") == "create":
            # Dates are optional; parse only when present.
            start_date = None
            if self.request.get("event_start_date"):
                start_date = datetime.datetime.strptime(self.request.get("event_start_date"), "%Y-%m-%d")
            end_date = None
            if self.request.get("event_end_date"):
                end_date = datetime.datetime.strptime(self.request.get("event_end_date"), "%Y-%m-%d")
            # Event keys follow "<year><lowercase short name>", e.g. "2014cc".
            event_key = str(self.request.get("event_year")) + str.lower(str(self.request.get("event_short")))
            event = Event(
                id=event_key,
                event_type_enum=int(self.request.get("event_type_enum")),
                event_short=self.request.get("event_short"),
                first_eid=self.request.get("event_first_eid"),
                name=self.request.get("event_name"),
                year=int(self.request.get("event_year")),
                start_date=start_date,
                end_date=end_date,
                city=self.request.get("city"),
                state_prov=self.request.get("state_prov"),
                country=self.request.get("country"),
            )
            event = EventManipulator.createOrUpdate(event)
            self.redirect("/admin/offseasons?success=create&event_key=%s" % event_key)
            return

        self.redirect("/admin/offseasons")
| mit | b7fbcf6cf33e84e6c153165dc2430a35 | 37.482353 | 119 | 0.612351 | 3.803488 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/web/handlers/tests/helpers.py | 1 | 11612 | import json
import re
from datetime import datetime, timedelta
from typing import Generator, List, NamedTuple, Optional, Tuple
import bs4
from google.appengine.ext import ndb
from pyre_extensions import none_throws
from backend.common.consts.event_type import EventType
from backend.common.models.district import District
from backend.common.models.district_team import DistrictTeam
from backend.common.models.event import Event
from backend.common.models.event_team import EventTeam
from backend.common.models.keys import DistrictKey, EventKey, TeamNumber
from backend.common.models.team import Team
class TeamCurrentEvent(NamedTuple):
    """A team's in-progress event state, as rendered on the team page."""

    event_key: Optional[str]
    webcast: Optional[Tuple[str, str]]  # (link, text)
    currently_competing: Optional[str]
    upcoming_matches: Optional[bool]


class TeamInfo(NamedTuple):
    """Fields scraped from a rendered team page, used for test assertions."""

    header: str
    location: Optional[str]
    full_name: Optional[str]
    rookie_year: Optional[str]
    last_competed: Optional[str]
    website: Optional[str]
    home_cmp: Optional[str]
    hof: Optional[str]
    district: Optional[str]
    district_link: Optional[str]
    social_media: Optional[List[Tuple[str, str]]]  # tuple of (slug_name, foreign_key)
    preferred_medias: Optional[
        List[Tuple[str, str]]
    ]  # tuple of (slug_name, foreign_key)
    current_event: Optional[TeamCurrentEvent]


class TeamEventParticipation(NamedTuple):
    """A single event appearance parsed from a team page."""

    event_name: str


class TeamEventHistory(NamedTuple):
    """One row of the team history table: year, event name, and award list."""

    year: int
    event: str
    awards: List[str]


class TeamHOFInfo(NamedTuple):
    """One Hall of Fame banner: the team, and the year/event it was earned."""

    team_number: int
    year: int
    event: str


class ParsedTeam(NamedTuple):
    """One parsed row of a teams list table, including optional links."""

    team_number: TeamNumber
    team_number_link: Optional[str]
    team_name: str
    team_name_link: Optional[str]
    team_location: str
@ndb.synctasklet
def preseed_team(team_number: TeamNumber) -> Generator:
    """Synchronously store a deterministic test Team keyed "frc<team_number>"."""
    team = Team(
        id=f"frc{team_number}",
        team_number=team_number,
        nickname=f"The {team_number} Team",
        name="The Blue Alliance / Some High School",
        city="New York",
        state_prov="NY",
        country="USA",
        website="https://www.thebluealliance.com",
        rookie_year=2008,
    )
    yield team.put_async()
@ndb.synctasklet
def preseed_event(event_key: EventKey) -> Generator:
    """Synchronously store a deterministic offseason test Event for event_key."""
    # The first four characters of an event key are the year, e.g. "2020nyny".
    year = int(event_key[:4])
    yield Event(
        id=event_key,
        event_short=event_key[4:],
        year=year,
        name="Test Event",
        event_type_enum=EventType.OFFSEASON,
        start_date=datetime(year, 3, 1),
        end_date=datetime(year, 3, 5),
        webcast_json=json.dumps(
            [
                {"type": "twitch", "channel": "robosportsnetwork"},
                {"type": "twitch", "channel": "firstinspires"},
            ]
        ),
    ).put_async()
@ndb.synctasklet
def preseed_district(district_key: DistrictKey) -> Generator:
    """Store a District plus five events, five teams, and their DistrictTeam links.

    Entity ids are deterministic ("<year>event1".."<year>event5", "frc1".."frc5")
    so tests can assert against them.
    """
    # The first four characters of a district key are the year, e.g. "2020ne".
    year = int(district_key[:4])
    yield District(
        id=district_key,
        year=year,
        abbreviation=district_key[4:],
        display_name=district_key[4:].upper(),
    ).put_async()

    # Five official district events, one week apart starting March 1.
    yield ndb.put_multi_async(
        [
            Event(
                id=f"{year}event{i}",
                event_short=f"event{i}",
                year=year,
                name=f"Event {i}",
                district_key=ndb.Key(District, district_key),
                event_type_enum=EventType.DISTRICT,
                official=True,
                start_date=datetime(year, 3, 1) + timedelta(days=7 * i),
                end_date=datetime(year, 3, 3) + timedelta(days=7 * i),
            )
            for i in range(1, 6)
        ]
    )
    yield ndb.put_multi_async(
        [
            Team(
                id=f"frc{i}",
                team_number=i,
                nickname=f"The {i} Team",
                city=f"City {i}",
            )
            for i in range(1, 6)
        ]
    )
    # Link each of the five teams to the district for the given year.
    yield ndb.put_multi_async(
        [
            DistrictTeam(
                id=f"{district_key}_frc{i}",
                year=year,
                district_key=ndb.Key(District, district_key),
                team=ndb.Key(Team, f"frc{i}"),
            )
            for i in range(1, 6)
        ]
    )
@ndb.synctasklet
def preseed_event_for_team(team_number: TeamNumber, event_key: EventKey) -> Generator:
    """Store a test Event plus the EventTeam linking the given team to it."""
    # The first four characters of an event key are the year.
    year = int(event_key[:4])
    event = Event(
        id=event_key,
        event_short=event_key[4:],
        year=year,
        name="Test Event",
        event_type_enum=EventType.REGIONAL,
        start_date=datetime(2020, 3, 1),
        end_date=datetime(2020, 3, 5),
        webcast_json=json.dumps(
            [
                {"type": "twitch", "channel": "robosportsnetwork"},
                {"type": "twitch", "channel": "firstinspires"},
            ]
        ),
    )
    yield event.put_async()
    event_team = EventTeam(
        id=f"{event_key}_frc{team_number}",
        event=ndb.Key(Event, event_key),
        team=ndb.Key(Team, f"frc{team_number}"),
        year=year,
    )
    yield event_team.put_async()
def get_team_info(resp_data: str) -> TeamInfo:
    """Scrape a rendered team page's HTML into a TeamInfo for assertions.

    Any element missing from the page produces None for its field.
    """
    soup = bs4.BeautifulSoup(resp_data, "html.parser")
    header = soup.find(id="team-title")
    location = soup.find(id="team-location")
    full_name = soup.find(id="team-name")
    rookie_year = soup.find(id="team-rookie-year")
    last_competed = soup.find(id="team-last-competed")
    website = soup.find(id="team-website")
    home_cmp = soup.find(id="team-home-cmp")
    hof = soup.find(id="team-hof")
    district = soup.find(id="team-district")
    # The district cell wraps its text in a link; keep the <a> for href access.
    district = district.find("a") if district else None
    # Social media entries are tagged with a data-media-type attribute.
    social_media = soup.find(id="team-social-media")
    social_media = (
        [
            (m["data-media-type"], "".join(m.stripped_strings))
            for m in social_media.find_all(attrs={"data-media-type": True})
        ]
        if social_media
        else None
    )
    # Preferred media carousel id is team-specific, e.g. "team-carousel-frc604".
    preferred_carousel = soup.find(id=re.compile(r"team-carousel-frc\d+"))
    preferred_medias = (
        [
            (m["data-media-type"], m["data-foreign-key"])
            for m in preferred_carousel.find_all(attrs={"data-media-type": True})
        ]
        if preferred_carousel
        else None
    )

    # The "current event" box only renders while the team is competing.
    current_event = None
    current_soup = soup.find(id="current-event")
    if current_soup:
        current_webcast = current_soup.find(id="current-event-webcast")
        currently_competing = current_soup.find(attrs={"class": "panel-title"})
        upcoming_matches = current_soup.find(attrs={"class": "panel-body"})
        upcoming_match_table = (
            upcoming_matches.find(attrs={"class": "match-table"})
            if upcoming_matches
            else None
        )
        current_event = TeamCurrentEvent(
            event_key=current_soup["data-event-key"],
            webcast=(current_webcast["href"], "".join(current_webcast.stripped_strings))
            if current_webcast
            else None,
            currently_competing="".join(currently_competing.stripped_strings)
            if currently_competing
            else None,
            upcoming_matches=upcoming_match_table is not None,
        )

    return TeamInfo(
        header="".join(header.stripped_strings),
        location=location.string.strip() if location else None,
        full_name=full_name.string.strip() if full_name else None,
        rookie_year=rookie_year.string.strip() if rookie_year else None,
        last_competed=last_competed.string.strip() if last_competed else None,
        website=website.string.strip() if website else None,
        home_cmp=home_cmp.string.strip() if home_cmp else None,
        hof=hof.string.strip() if hof else None,
        district=district.string.strip() if district else None,
        district_link=district["href"] if district else None,
        # Empty lists collapse to None so callers can treat "absent" uniformly.
        social_media=social_media or None,
        preferred_medias=preferred_medias or None,
        current_event=current_event,
    )
def get_team_history(resp_data: str) -> Optional[List[TeamEventHistory]]:
    """Extract the per-year event/award rows from a team history page.

    Returns None when the history table is not present in the page.
    """
    soup = bs4.BeautifulSoup(resp_data, "html.parser")
    table = soup.find(id="competition-list-table")
    if not table:
        return None
    history = []
    for row in table.find("tbody").find_all("tr"):
        cells = row.find_all("td")
        history.append(
            TeamEventHistory(
                year=int(cells[0].string),
                event="".join(cells[1].stripped_strings),
                awards=list(cells[2].stripped_strings),
            )
        )
    return history
def get_page_title(resp_data: str) -> str:
    """Return the stripped contents of the page's <title> tag."""
    parsed = bs4.BeautifulSoup(resp_data, "html.parser")
    return parsed.find("title").string.strip()
def get_years_participated_dropdown(resp_data: str) -> List[str]:
    """Return the stripped text of each <li> entry in the team-year dropdown."""
    parsed = bs4.BeautifulSoup(resp_data, "html.parser")
    dropdown = parsed.find("ul", id="team-year-dropdown")
    years = []
    for child in dropdown.contents:
        if child.name == "li":
            years.append(child.string.strip())
    return years
def get_team_event_participation(
    resp_data: str, event_key: EventKey
) -> TeamEventParticipation:
    """Return the event name shown in the team page section with id event_key."""
    event_div = bs4.BeautifulSoup(resp_data, "html.parser").find(id=event_key)
    event_name = event_div.find("h3").string.strip()
    return TeamEventParticipation(event_name=event_name)
def assert_alert(div: bs4.element.Tag, title: str, message: str, success: bool) -> None:
    """Assert `div` is a dismissible Bootstrap alert with the given title/message.

    Checks for 1) a close button, 2) an <h4> title, 3) a <p> status message,
    and that the alert color class matches `success`.
    """
    dismiss = div.find(
        "button", attrs={"type": "button", "class": "close", "data-dismiss": "alert"}
    )
    assert dismiss
    # The close button renders the multiplication sign "×" (UTF-8 C3 97).
    assert dismiss.text.encode("utf-8") == b"\xc3\x97"

    expected_class = "alert-success" if success else "alert-danger"
    assert expected_class in div.attrs["class"]

    heading = div.find("h4")
    assert heading
    assert heading.text == title

    body = div.find("p")
    assert body
    assert body.text == message
def find_teams_tables(resp_data: str) -> List[bs4.element.Tag]:
    """Return the paired team list tables (ids "teams_a" / "teams_b")."""
    parsed = bs4.BeautifulSoup(resp_data, "html.parser")
    table_id_pattern = re.compile(r"^teams_[ab]$")
    return parsed.find_all(id=table_id_pattern)
def get_teams_from_table(table: bs4.element.Tag) -> List[ParsedTeam]:
    """Parse each row of a teams table into a ParsedTeam."""
    rows = table.find("tbody").find_all("tr")
    teams = []
    for row in rows:
        # Cell ids look like "team-<number>-number" / "-name" / "-location".
        number_cell = row.find(id=re.compile(r"^team-\d+-number"))
        name_cell = row.find(id=re.compile(r"^team-\d+-name"))
        location_cell = row.find(id=re.compile(r"^team-\d+-location"))
        teams.append(
            ParsedTeam(
                team_number=int(number_cell.string),
                team_number_link=number_cell.get("href"),
                team_name=name_cell.string,
                team_name_link=name_cell.get("href"),
                team_location=location_cell.string,
            )
        )
    return teams
def get_all_teams(resp_data: str) -> List[ParsedTeam]:
    """Return teams from both list tables, or [] when no tables are present."""
    tables = find_teams_tables(resp_data)
    if not tables:
        return []
    assert len(tables) == 2
    first, second = tables
    return get_teams_from_table(first) + get_teams_from_table(second)
def get_HOF_awards(resp_data: str) -> Optional[List[TeamHOFInfo]]:
    """Parse the Hall of Fame banner panels into TeamHOFInfo entries."""
    soup = bs4.BeautifulSoup(resp_data, "html.parser")
    banners = soup.find_all("div", class_="panel-default")
    return [
        TeamHOFInfo(
            # Team number comes from the banner's /team/<number> link.
            team_number=int(
                none_throws(re.match(r"\/team\/(\d+)", b.find("a")["href"]))[1]
            ),
            # The award-event span starts with the year, e.g. "2019 Chezy Champs".
            year=int(
                none_throws(
                    re.match(
                        r"(\d+)",
                        b.find("div", {"class": "award-event"}).find("span").string,
                    )
                )[1]
            ),
            event=b.find("div", {"class": "award-event"}).find("span").string,
        )
        for b in banners
    ]
| mit | d92ce018def98d62ef3679002ab69173 | 31.709859 | 88 | 0.594557 | 3.437537 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/datafeed_parsers/csv_offseason_matches_parser.py | 1 | 3429 | import csv
import json
import re
from io import StringIO
class CSVOffseasonMatchesParser:
    """Parses CSV match-result rows for offseason events into match dicts."""

    @classmethod
    def parse(cls, data):
        """
        Parse CSV that contains match results.
        Format is as follows:
        match_id, red1, red2, red3, blue1, blue2, blue3, red score, blue score
        Example formats of match_id:
        qm1, sf2m1, f1m1

        Returns:
            A (matches, False) tuple, where matches is a list of match dicts.
        """
        matches = list()
        csv_data = list(
            csv.reader(StringIO(data), delimiter=",", skipinitialspace=True)
        )
        for row in csv_data:
            matches.append(cls.parse_csv_match(row))
        return matches, False

    @classmethod
    def parse_csv_match(cls, row):
        """Convert one CSV row into a match dict.

        BUGFIX: cells are now stripped of surrounding whitespace *before*
        unpacking. Previously the row was unpacked first and the list was
        stripped in place afterwards, which never affected the already-bound
        cell variables (strings are immutable) — so trailing whitespace made
        `isdigit()` fail and leaked into team keys.
        """
        row = [cell.strip() for cell in row]
        (
            match_id,
            red_1,
            red_2,
            red_3,
            blue_1,
            blue_2,
            blue_3,
            red_score,
            blue_score,
        ) = row

        team_key_names = []

        # Build "frcXXX" strings for all listed teams. Only all-digit entries
        # are indexed in team_key_names; non-numeric entries (e.g. "254B")
        # stay on the alliance but are not indexed.
        red_teams = [red_1, red_2, red_3]
        red_team_strings = []
        for team in red_teams:
            red_team_strings.append("frc" + team.upper())
            if team.isdigit():
                team_key_names.append("frc" + team.upper())

        blue_teams = [blue_1, blue_2, blue_3]
        blue_team_strings = []
        for team in blue_teams:
            blue_team_strings.append("frc" + team.upper())
            if team.isdigit():
                team_key_names.append("frc" + team.upper())

        # A missing score means the match hasn't been played; -1 is a sentinel.
        if not red_score:
            red_score = -1
        else:
            red_score = int(red_score)
        if not blue_score:
            blue_score = -1
        else:
            blue_score = int(blue_score)

        comp_level, match_number, set_number = cls.parse_match_number_info(match_id)

        alliances = {
            "red": {"teams": red_team_strings, "score": red_score},
            "blue": {"teams": blue_team_strings, "score": blue_score},
        }

        match = {
            "alliances_json": json.dumps(alliances),
            "comp_level": comp_level,
            "match_number": match_number,
            "set_number": set_number,
            "team_key_names": team_key_names,
        }
        return match

    @classmethod
    def parse_match_number_info(cls, string):
        """Split a match id like "qm1" or "sf2m1" into (comp_level, match, set)."""
        string = string.strip()
        # Stripping the digits out of the id leaves the level token, e.g.
        # "sf2m1" -> "sfm" -> "sf".
        COMP_LEVEL_MAP = {
            "qm": "qm",
            "efm": "ef",
            "qfm": "qf",
            "sfm": "sf",
            "fm": "f",
        }
        MATCH_PARSE_STYLE = {
            "qm": cls.parse_qual_match_number_info,
            "ef": cls.parse_elim_match_number_info,
            "qf": cls.parse_elim_match_number_info,
            "sf": cls.parse_elim_match_number_info,
            "f": cls.parse_elim_match_number_info,
        }
        pattern = re.compile("[0-9]")
        comp_level = COMP_LEVEL_MAP[pattern.sub("", string)]
        match_number, set_number = MATCH_PARSE_STYLE[comp_level](string)
        return comp_level, match_number, set_number

    @classmethod
    def parse_qual_match_number_info(cls, string):
        """Qual matches have no sets: "qm12" -> (12, 1)."""
        match_number = int(re.sub(r"\D", "", string))
        return match_number, 1

    @classmethod
    def parse_elim_match_number_info(cls, string):
        """Elim ids like "sf2m1" -> (match 1, set 2); set is the digit before 'm'."""
        set_number, match_number = string.split("m")
        match_number = int(match_number)
        set_number = int(set_number[-1])
        return match_number, set_number
| mit | 4a24ecf7c08166eba6c7b8bee65a1e78 | 27.106557 | 84 | 0.514436 | 3.632415 | false | false | false | false |
the-blue-alliance/the-blue-alliance | old_py2/tests/test_match_controller.py | 5 | 3134 | from datetime import datetime
import unittest2
import webapp2
import webtest
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from webapp2_extras.routes import RedirectRoute
from consts.event_type import EventType
from controllers.match_controller import MatchDetail
from models.event import Event
from models.match import Match
class TestMatchController(unittest2.TestCase):
    """Integration tests for the match detail page (Python 2 / webapp2 era)."""

    def setUp(self):
        # Stand up GAE service stubs so datastore/memcache calls work in tests.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        app = webapp2.WSGIApplication([
            RedirectRoute(r'/match/<match_key>', MatchDetail, 'match-detail'),
        ])
        self.testapp = webtest.TestApp(app)

        # Fixture: 2014 Cheesy Champs finals match 1.
        self.match = Match(
            id="2014cc_f1m1",
            event=ndb.Key(Event, "2014cc"),
            year=2014,
            comp_level="f",
            set_number=1,
            match_number=1,
            # NOTE(review): u'254' looks like it should be u'frc254' to match
            # the alliances_json below — verify before relying on this fixture.
            team_key_names=[u'frc846', u'frc2135', u'frc971', u'254', u'frc1678', u'frc973'],
            time=datetime.fromtimestamp(1409527874),
            time_string="4:31 PM",
            youtube_videos=["JbwUzl3W9ug", "bHGyTjxbLz8"],
            tba_videos=[],
            alliances_json='{\
                "blue": {\
                "score": 270,\
                "teams": [\
                "frc846",\
                "frc2135",\
                "frc971"]},\
                "red": {\
                "score": 310,\
                "teams": [\
                "frc254",\
                "frc1678",\
                "frc973"]}}',
            score_breakdown_json='{\
                "blue": {\
                "auto": 70,\
                "teleop_goal+foul": 40,\
                "assist": 120,\
                "truss+catch": 40\
                },"red": {\
                "auto": 70,\
                "teleop_goal+foul": 50,\
                "assist": 150,\
                "truss+catch": 40}}'
        )

        self.event = Event(
            id="2014cc",
            name="Cheesy Champs",
            event_type_enum=EventType.OFFSEASON,
            short_name="Cheesy Champs",
            event_short="cc",
            year=2014,
            end_date=datetime(2014, 03, 27),
            official=True,
            city='Hartford',
            state_prov='CT',
            country='USA',
            venue="Some Venue",
            venue_address="Some Venue, Hartford, CT, USA",
            timezone_id="America/New_York",
            start_date=datetime(2014, 03, 24)
        )

        self.match.put()
        self.event.put()

    def tearDown(self):
        self.testbed.deactivate()

    def test_match_detail(self):
        # Existing match renders successfully.
        response = self.testapp.get("/match/2014cc_f1m1")
        self.assertEqual(response.status_int, 200)

    def test_bad_match_detail(self):
        # Nonexistent match should 404.
        response = self.testapp.get("/match/2014cc_f1m2", status=404)
        self.assertEqual(response.status_int, 404)
| mit | 4f70b71defe74bf50bb3593a9fe39cbe | 31.989474 | 93 | 0.506063 | 3.89801 | false | true | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/middleware.py | 1 | 2444 | from typing import Any, Callable
from flask import Flask
from google.appengine.ext import ndb
from werkzeug.wrappers import Request
from werkzeug.wsgi import ClosingIterator
from backend.common.environment import Environment
from backend.common.profiler import send_traces, Span, trace_context
from backend.common.run_after_response import execute_callbacks, local_context
class TraceRequestMiddleware:
    """
    A middleware that gives trace_context access to the request
    """

    app: Callable[[Any, Any], Any]  # the wrapped WSGI application

    def __init__(self, app: Callable[[Any, Any], Any]):
        self.app = app

    def __call__(self, environ: Any, start_response: Any):
        # Stash the current request so profiler spans can reference it.
        trace_context.request = Request(environ)
        return self.app(environ, start_response)
class AfterResponseMiddleware:
    """
    A middleware that handles tasks after handling the response.
    """

    app: Callable[[Any, Any], Any]  # the wrapped WSGI application

    def __init__(self, app: Callable[[Any, Any], Any]):
        self.app = app

    @ndb.toplevel
    def __call__(self, environ: Any, start_response: Any):
        # Expose the request to run_after_response callbacks.
        local_context.request = Request(environ)
        # ClosingIterator invokes _run_after once the response body is closed.
        return ClosingIterator(self.app(environ, start_response), self._run_after)

    def _run_after(self):
        # Runs after the response has been handed off to the client.
        with Span("Running AfterResponseMiddleware"):
            pass
        send_traces()
        execute_callbacks()
def install_middleware(app: Flask, configure_secret_key: bool = True) -> None:
    """Install TBA's common WSGI middlewares on a Flask app.

    Args:
        app: The Flask application to wrap.
        configure_secret_key: When True, lazily set app.secret_key from
            sitevars before each request if it isn't set yet.
    """

    @app.before_request
    def _app_before():
        if configure_secret_key and not app.secret_key:
            _set_secret_key(app)

    # The middlewares get added in order of this last, and each wraps the previous
    # This means, the last one in this list is the "outermost" middleware that runs
    # _first_ for a given request, for the cases when order matters
    middlewares = [
        AfterResponseMiddleware,
        TraceRequestMiddleware,
    ]
    for middleware in middlewares:
        app.wsgi_app = middleware(app.wsgi_app)  # type: ignore[override]
def _set_secret_key(app: Flask) -> None:
    """Load the Flask session secret from sitevars and install it on the app.

    In production a missing or default secret is a hard error.
    """
    # Imported here to avoid a module-level import cycle with sitevars.
    from backend.common.sitevars.flask_secrets import FlaskSecrets

    key = FlaskSecrets.secret_key()
    in_prod = Environment.is_prod()
    if in_prod and not key:
        raise Exception("Secret key not set in production!")
    if in_prod and key == FlaskSecrets.DEFAULT_SECRET_KEY:
        raise Exception("Secret key may not be default in production!")
    app.secret_key = key
| mit | 7a5b559b9b24a8dbf5a1343850efbc6d | 31.157895 | 83 | 0.678396 | 4.039669 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/api/api_trusted_parsers/json_matches_parser.py | 1 | 9527 | import datetime
import json
from typing import (
Any,
AnyStr,
Dict,
Mapping,
MutableSequence,
Optional,
Sequence,
TypedDict,
)
from pyre_extensions import safe_json
from backend.common.consts.alliance_color import AllianceColor
from backend.common.consts.comp_level import COMP_LEVELS, CompLevel
from backend.common.datafeed_parsers.exceptions import ParserInputException
from backend.common.helpers.score_breakdown_keys import ScoreBreakdownKeys
from backend.common.models.alliance import MatchAlliance
from backend.common.models.keys import TeamKey, Year
from backend.common.models.team import Team
class MatchInput(TypedDict, total=False):
    """Shape of one raw match dict in the trusted-API JSON payload.

    Field names mirror the JSON keys that JSONMatchesParser.parse reads.
    The previous declarations ("score_breakdowns", "time") did not match any
    key the parser actually consumes ("score_breakdown", "time_utc").
    """

    comp_level: CompLevel
    set_number: int
    match_number: int
    alliances: Mapping[AllianceColor, MatchAlliance]
    score_breakdown: Mapping[AllianceColor, Dict[str, Any]]
    time_string: str
    time_utc: str
    display_name: str
class ParsedMatch(TypedDict):
    """Sanitized match dict produced by JSONMatchesParser.parse."""

    comp_level: CompLevel
    set_number: int
    match_number: int
    alliances_json: str  # JSON-serialized {color: MatchAlliance}
    score_breakdown_json: Optional[str]  # JSON-serialized breakdown, if provided
    time_string: Optional[str]  # event-local display time, e.g. "9:15 AM"
    time: Optional[datetime.datetime]  # naive UTC datetime; None when 'time_utc' absent
    team_key_names: Sequence[TeamKey]
    display_name: Optional[str]
class JSONMatchesParser:
    """Validates and sanitizes trusted-API match JSON into ParsedMatch dicts."""

    @staticmethod
    def parse(matches_json: AnyStr, year: Year) -> Sequence[ParsedMatch]:
        """
        Parse JSON that contains a list of matches for a given year where each match is a dict of:
        comp_level: String in the set {"qm", "ef", "qf", "sf", "f"}
        set_number: Integer identifying the elim set number. Ignored for qual matches. ex: the 4 in qf4m2
        match_number: Integer identifying the match number within a set. ex: the 2 in qf4m2
        alliances: Dict of {'red': {'teams': ['frcXXX'...], 'score': S, 'surrogates': ['frcXXX'...], 'dqs': ['frcXXX'...]}, 'blue': {...}}. Where scores (S) are integers. Null scores indicate that a match has not yet been played. surrogates and dqs are optional.
        score_breakdown: Dict of {'red': {K1: V1, K2: V2, ...}, 'blue': {...}}. Where Kn are keys and Vn are values for those keys.
        time_string: String in the format "(H)H:MM AM/PM" for when the match will be played in the event's local timezone. ex: "9:15 AM"
        time_utc: UTC time of the match as a string in ISO 8601 format (YYYY-MM-DDTHH:MM:SS).

        Raises:
            ParserInputException: On any malformed or unsupported input.
        """
        matches = safe_json.loads(matches_json, Sequence[MatchInput], validate=False)
        if not isinstance(matches, list):
            raise ParserInputException("Invalid JSON. Please check input.")

        parsed_matches: MutableSequence[ParsedMatch] = []
        for match in matches:
            if type(match) is not dict:
                raise ParserInputException("Matches must be dicts.")

            # Pull out all fields up front; each is validated below.
            comp_level = match.get("comp_level", None)
            set_number = match.get("set_number", None)
            match_number = match.get("match_number", None)
            alliances = match.get("alliances", None)
            score_breakdown = match.get("score_breakdown", None)
            time_string = match.get("time_string", None)
            time_utc = match.get("time_utc", None)
            display_name = match.get("display_name", None)

            if comp_level is None:
                raise ParserInputException("Match must have a 'comp_level'")
            if comp_level not in COMP_LEVELS:
                raise ParserInputException(
                    "'comp_level' must be one of: {}".format(COMP_LEVELS)
                )

            # Qual matches always live in set 1; elim matches need an explicit set.
            if comp_level == "qm":
                set_number = 1
            elif set_number is None or type(set_number) is not int:
                raise ParserInputException("Match must have an integer 'set_number'")
            if match_number is None or type(match_number) is not int:
                raise ParserInputException("Match must have an integer 'match_number'")

            if type(alliances) is not dict:
                raise ParserInputException("'alliances' must be a dict")
            else:
                for color, details in alliances.items():
                    if color not in {"red", "blue"}:
                        raise ParserInputException(
                            "Alliance color '{}' not recognized".format(color)
                        )
                    if "teams" not in details:
                        raise ParserInputException(
                            f"alliances[{str(color)}] must have key 'teams'"
                        )
                    if "score" not in details:
                        raise ParserInputException(
                            f"alliances[{str(color)}] must have key 'score'"
                        )
                    for team_key in details["teams"]:
                        if not Team.validate_key_name(str(team_key)):
                            raise ParserInputException(
                                f"Bad team: '{team_key}'. Must follow format 'frcXXX'."
                            )
                    # A null score means the match has not been played yet.
                    if (
                        details["score"] is not None
                        and type(details["score"]) is not int
                    ):
                        raise ParserInputException(
                            f"alliances[{str(color)}]['score'] must be an integer or null"
                        )
                    # Surrogates and DQs are optional, but must reference
                    # teams that are actually in the match.
                    for team_key in details.get("surrogates", []):
                        if not Team.validate_key_name(str(team_key)):
                            raise ParserInputException(
                                f"Bad surrogate team: '{team_key}'. Must follow format 'frcXXX'."
                            )
                        if team_key not in details["teams"]:
                            raise ParserInputException(
                                f"Bad surrogate team: '{team_key}'. Must be a team in the match.'."
                            )
                    for team_key in details.get("dqs", []):
                        if not Team.validate_key_name(str(team_key)):
                            raise ParserInputException(
                                f"Bad dq team: '{team_key}'. Must follow format 'frcXXX'."
                            )
                        if team_key not in details["teams"]:
                            raise ParserInputException(
                                f"Bad dq team: '{team_key}'. Must be a team in the match.'."
                            )

            if score_breakdown is not None:
                if type(score_breakdown) is not dict:
                    raise ParserInputException("'score_breakdown' must be a dict")
                else:
                    for color, breakdown in score_breakdown.items():
                        if color not in {"red", "blue"}:
                            raise ParserInputException(
                                f"Alliance color '{color}' not recognized"
                            )
                        # Breakdown keys are validated against the given year's
                        # known score-breakdown schema.
                        for k in breakdown.keys():
                            is_valid = ScoreBreakdownKeys.is_valid_score_breakdown_key(
                                k, year
                            )
                            if is_valid is not True:
                                raise ParserInputException(
                                    f"Valid score breakdowns for {year} are: {is_valid}"
                                )

            datetime_utc = None
            if time_utc is not None:
                try:
                    datetime_utc = datetime.datetime.fromisoformat(time_utc)
                    # remove timezone info because DatetimeProperty can't handle timezones
                    datetime_utc = datetime_utc.replace(tzinfo=None)
                except ValueError:
                    raise ParserInputException(
                        "Could not parse 'time_utc'. Check that it is in ISO 8601 format."
                    )

            if display_name is not None and type(display_name) is not str:
                raise ParserInputException("'display_name' must be a string")

            # validation passed. build new dicts to sanitize
            parsed_alliances: Dict[AllianceColor, MatchAlliance] = {
                AllianceColor.RED: {
                    "teams": alliances["red"]["teams"],
                    "score": alliances["red"]["score"],
                    "surrogates": alliances["red"].get("surrogates", []),
                    "dqs": alliances["red"].get("dqs", []),
                },
                AllianceColor.BLUE: {
                    "teams": alliances["blue"]["teams"],
                    "score": alliances["blue"]["score"],
                    "surrogates": alliances["blue"].get("surrogates", []),
                    "dqs": alliances["blue"].get("dqs", []),
                },
            }
            parsed_match: ParsedMatch = {
                "comp_level": comp_level,
                "set_number": set_number,
                "match_number": match_number,
                "alliances_json": json.dumps(parsed_alliances),
                "score_breakdown_json": json.dumps(score_breakdown)
                if score_breakdown is not None
                else None,
                "time_string": time_string,
                "time": datetime_utc,
                "team_key_names": parsed_alliances[AllianceColor.RED]["teams"]
                + parsed_alliances[AllianceColor.BLUE]["teams"],
                "display_name": display_name,
            }

            parsed_matches.append(parsed_match)
        return parsed_matches
| mit | b63d48e141aa6cb07c6aa7b26feb8dbd | 45.473171 | 262 | 0.52346 | 4.536667 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/api/handlers/decorators.py | 1 | 4838 | from functools import wraps
from typing import Set
from flask import g, request
from backend.api.trusted_api_auth_helper import TrustedApiAuthHelper
from backend.common.auth import current_user
from backend.common.consts.auth_type import AuthType
from backend.common.consts.renamed_districts import RenamedDistricts
from backend.common.models.api_auth_access import ApiAuthAccess
from backend.common.models.district import District
from backend.common.models.event import Event
from backend.common.models.match import Match
from backend.common.models.team import Team
from backend.common.profiler import Span
def api_authenticated(func):
    """Decorator requiring a valid read API key, or a logged-in browser session.

    The key is accepted from the X-TBA-Auth-Key header or URL parameter. On
    success the key owner's account id is stored in `g.auth_owner_id` for
    analytics; on failure a ({"Error": ...}, 401) tuple is returned.
    """

    @wraps(func)
    def decorated_function(*args, **kwargs):
        with Span("api_authenticated"):
            auth_key = request.headers.get(
                "X-TBA-Auth-Key", request.args.get("X-TBA-Auth-Key")
            )

            auth_owner_id = None
            if auth_key:
                auth = ApiAuthAccess.get_by_id(auth_key)
                if auth and auth.is_read_key:
                    auth_owner_id = auth.owner.id() if auth.owner else None
                else:
                    return (
                        {
                            "Error": "X-TBA-Auth-Key is invalid. Please get an access key at http://www.thebluealliance.com/account."
                        },
                        401,
                    )
            else:
                # No key supplied — fall back to an authenticated web session.
                user = current_user()
                if user:
                    auth_owner_id = user.account_key.id()
                else:
                    return (
                        {
                            "Error": "X-TBA-Auth-Key is a required header or URL param. Please get an access key at http://www.thebluealliance.com/account."
                        },
                        401,
                    )

            # Set for our GA event tracking in `track_call_after_response`
            g.auth_owner_id = auth_owner_id

        return func(*args, **kwargs)

    return decorated_function
def require_write_auth(auth_types: Set[AuthType]):
    """Decorator factory enforcing trusted-API write auth on the wrapped route.

    The wrapped handler must receive an `event_key` kwarg; the request is
    aborted if the caller's trusted-API credentials don't grant one of
    `auth_types` for that event.
    """

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            with Span("require_write_auth"):
                # This will abort the request on failure
                TrustedApiAuthHelper.do_trusted_api_auth(
                    kwargs["event_key"], auth_types
                )
            return func(*args, **kwargs)

        return wrapper

    return decorator
def validate_keys(func):
    """Decorator validating team/event/match/district key kwargs.

    First checks key *format*, then key *existence* in the datastore; the
    async gets are all kicked off before any result is awaited so they run
    concurrently. Returns a ({"Error": ...}, 404) tuple on the first failure,
    otherwise calls the wrapped handler.
    """

    @wraps(func)
    def decorated_function(*args, **kwargs):
        with Span("validate_keys"):
            # Check key format
            team_key = kwargs.get("team_key")
            if team_key and not Team.validate_key_name(team_key):
                return {"Error": f"{team_key} is not a valid team key"}, 404
            event_key = kwargs.get("event_key")
            if event_key and not Event.validate_key_name(event_key):
                return {"Error": f"{event_key} is not a valid event key"}, 404
            match_key = kwargs.get("match_key")
            if match_key and not Match.validate_key_name(match_key):
                return {"Error": f"{match_key} is not a valid match key"}, 404
            district_key = kwargs.get("district_key")
            if district_key and not District.validate_key_name(district_key):
                return {"Error": f"{district_key} is not a valid district key"}, 404

            # Check key existence
            team_future = None
            if team_key:
                team_future = Team.get_by_id_async(team_key)
            event_future = None
            if event_key:
                event_future = Event.get_by_id_async(event_key)
            match_future = None
            if match_key:
                match_future = Match.get_by_id_async(match_key)
            district_exists_future = None
            if district_key:
                # Districts can be renamed across years; this helper accounts
                # for historical names as well.
                district_exists_future = RenamedDistricts.district_exists_async(
                    district_key
                )

            if team_future is not None and not team_future.get_result():
                return {"Error": f"team key: {team_key} does not exist"}, 404
            if event_future is not None and not event_future.get_result():
                return {"Error": f"event key: {event_key} does not exist"}, 404
            if match_future is not None and not match_future.get_result():
                return {"Error": f"match key: {match_key} does not exist"}, 404
            if (
                district_exists_future is not None
                and not district_exists_future.get_result()
            ):
                return {"Error": f"district key: {district_key} does not exist"}, 404

        return func(*args, **kwargs)

    return decorated_function
| mit | 3cbbb73989fe3e03af67ae8d5d0fb929 | 35.651515 | 156 | 0.560149 | 4.21796 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/tasks_io/handlers/math.py | 1 | 11587 | import json
import logging
from typing import List, Optional
from flask import abort, Blueprint, make_response, render_template, request, url_for
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from werkzeug.wrappers import Response
from backend.common.consts.event_type import EventType, SEASON_EVENT_TYPES
from backend.common.futures import TypedFuture
from backend.common.helpers.district_helper import DistrictHelper
from backend.common.helpers.event_helper import EventHelper
from backend.common.helpers.event_insights_helper import EventInsightsHelper
from backend.common.helpers.event_team_updater import EventTeamUpdater
from backend.common.helpers.match_helper import MatchHelper
from backend.common.helpers.matchstats_helper import MatchstatsHelper
from backend.common.helpers.prediction_helper import PredictionHelper
from backend.common.manipulators.district_manipulator import DistrictManipulator
from backend.common.manipulators.event_details_manipulator import (
EventDetailsManipulator,
)
from backend.common.manipulators.event_team_manipulator import EventTeamManipulator
from backend.common.models.district import District
from backend.common.models.district_ranking import DistrictRanking
from backend.common.models.event import Event
from backend.common.models.event_details import EventDetails
from backend.common.models.keys import DistrictKey, EventKey, Year
from backend.common.models.team import Team
from backend.common.queries.district_query import DistrictsInYearQuery
from backend.common.queries.event_query import DistrictEventsQuery, EventListQuery
from backend.common.queries.team_query import DistrictTeamsQuery
blueprint = Blueprint("math", __name__)
@blueprint.route("/tasks/math/enqueue/district_points_calc/<int:year>")
def enqueue_event_district_points_calc(year: Year) -> Response:
    """
    Enqueues calculation of district points for all season events for a given year
    """
    season_events_query = Event.query(
        Event.year == year, Event.event_type_enum.IN(SEASON_EVENT_TYPES)
    )
    event_keys: List[ndb.Key] = season_events_query.fetch(None, keys_only=True)
    for key in event_keys:
        taskqueue.add(
            url=url_for(
                "math.event_district_points_calc", event_key=key.string_id()
            ),
            method="GET",
            target="py3-tasks-io",
            queue_name="default",
        )

    # Only write out a response body when invoked outside the task queue
    if "X-Appengine-Taskname" in request.headers:
        return make_response("")
    enqueued_ids = [key.id() for key in event_keys]
    return make_response("Enqueued for: {}".format(enqueued_ids))
@blueprint.route("/tasks/math/do/district_points_calc/<event_key>")
def event_district_points_calc(event_key: EventKey) -> Response:
    """
    Calculates district points for an event, stores them on EventDetails,
    and enqueues a rankings recalculation for the event's district.
    """
    event = Event.get_by_id(event_key)
    if event is None:
        abort(404)
    # Non-season events don't award district points unless explicitly
    # requested via the allow-offseason query param
    if event.event_type_enum not in SEASON_EVENT_TYPES and not request.args.get(
        "allow-offseason", None
    ):
        return make_response(
            f"Can't calculate district points for a non-season event {event.key_name}!",
            400,
        )
    district_points = DistrictHelper.calculate_event_points(event)
    event_details = EventDetails(id=event_key, district_points=district_points)
    EventDetailsManipulator.createOrUpdate(event_details)
    # Enqueue task to update rankings
    if event.district_key:
        taskqueue.add(
            url=url_for(
                "math.district_rankings_calc",
                district_key=event.district_key.string_id(),
            ),
            method="GET",
            target="py3-tasks-io",
            queue_name="default",
        )
    if (
        "X-Appengine-Taskname" not in request.headers
    ):  # Only write out if not in taskqueue
        return make_response(json.dumps(district_points, sort_keys=True, indent=2))
    return make_response("")
@blueprint.route("/tasks/math/enqueue/district_rankings_calc/<int:year>")
def enqueue_district_rankings_calc(year: Year) -> Response:
    """
    Enqueues calculation of rankings for all districts for a given year
    """
    district_keys = [
        district.key.id() for district in DistrictsInYearQuery(int(year)).fetch()
    ]
    for district_key in district_keys:
        # Kick off both our own rankings calculation and an FRC API fetch
        for endpoint in ("math.district_rankings_calc", "frc_api.district_rankings"):
            taskqueue.add(
                url=url_for(endpoint, district_key=district_key),
                method="GET",
                target="py3-tasks-io",
                queue_name="default",
            )

    # Only write out a response body when invoked outside the task queue
    if "X-Appengine-Taskname" in request.headers:
        return make_response("")
    return make_response(f"Enqueued for: {district_keys}")
@blueprint.route("/tasks/math/do/district_rankings_calc/<district_key>")
def district_rankings_calc(district_key: DistrictKey) -> Response:
    """
    Calculates district rankings for a district year and stores them on the
    District model.
    """
    district = District.get_by_id(district_key)
    if not district:
        return make_response(f"District {district_key} not found", 404)
    # Start both fetches before blocking on either so they run in parallel
    events_future: TypedFuture[List[Event]] = DistrictEventsQuery(
        district_key
    ).fetch_async()
    teams_future: TypedFuture[List[Team]] = DistrictTeamsQuery(
        district_key
    ).fetch_async()
    events = events_future.get_result()
    for event in events:
        event.prep_details()
    events = EventHelper.sorted_events(events)
    team_totals = DistrictHelper.calculate_rankings(events, teams_future, district.year)
    rankings: List[DistrictRanking] = []
    current_rank = 1
    for key, points in team_totals.items():
        point_detail = DistrictRanking(
            rank=current_rank,
            team_key=key,
            event_points=[],
            rookie_bonus=points.get("rookie_bonus", 0),
            point_total=points["point_total"],
        )
        for event, event_points in points["event_points"]:
            event_points["event_key"] = event.key_name
            # Flag district championship events (including divisions)
            event_points["district_cmp"] = (
                event.event_type_enum == EventType.DISTRICT_CMP
                or event.event_type_enum == EventType.DISTRICT_CMP_DIVISION
            )
            point_detail["event_points"].append(event_points)
        if district.year == 2022:
            # 2022 only: include the "other bonus" component in the ranking
            point_detail["other_bonus"] = points.get("other_bonus", 0)
        rankings.append(point_detail)
        current_rank += 1
    if rankings:
        district.rankings = rankings
        DistrictManipulator.createOrUpdate(district)
    if (
        "X-Appengine-Taskname" not in request.headers
    ):  # Only write out if not in taskqueue
        return make_response(
            f"Finished calculating rankings for: {district_key}:\n{rankings}"
        )
    return make_response("")
@blueprint.route("/tasks/math/enqueue/event_matchstats/now", defaults={"year": None})
@blueprint.route("/tasks/math/enqueue/event_matchstats/<int:year>")
def enqueue_event_matchstats(year: Optional[Year]) -> str:
    """
    Enqueues Matchstats calculation for either the events currently in
    progress ("now" route) or every event in a given year.
    """
    if year is None:
        # "now" route: only events within a day of being live
        events = EventHelper.events_within_a_day()
    else:
        events: List[Event] = EventListQuery(year=year).fetch()
    events = EventHelper.sorted_events(events)
    for event in events:
        taskqueue.add(
            url="/tasks/math/do/event_matchstats/" + event.key_name,
            method="GET",
            target="py3-tasks-io",
            queue_name="run-in-order",  # Because predictions depend on past events
        )
    template_values = {
        "event_count": len(events),
        "year": year,
    }
    return render_template("math/event_matchstats_enqueue.html", **template_values)
@blueprint.route("/tasks/math/do/event_matchstats/<event_key>")
def event_matchstats_calc(event_key: EventKey) -> Response:
    """
    Calculates match stats (OPR/DPR/CCWM) for an event
    Calculates predictions for an event
    Calculates insights for an event
    All three results are written to the event's EventDetails.
    """
    event = Event.get_by_id(event_key)
    if not event:
        abort(404)
    matchstats_dict = MatchstatsHelper.calculate_matchstats(event.matches, event.year)
    # An all-empty result is treated as a failed calculation; store None
    if not any([v != {} for v in matchstats_dict.values()]):
        logging.warning("Matchstat calculation for {} failed!".format(event_key))
        matchstats_dict = None
    predictions_dict = None
    # Predictions only run for these specific seasons (season events only),
    # or when explicitly enabled on the event
    if (
        event.year in {2016, 2017, 2018, 2019, 2020, 2022}
        and event.event_type_enum in SEASON_EVENT_TYPES
    ) or event.enable_predictions:
        sorted_matches = MatchHelper.play_order_sorted_matches(event.matches)
        (
            match_predictions,
            match_prediction_stats,
            stat_mean_vars,
        ) = PredictionHelper.get_match_predictions(sorted_matches)
        (
            ranking_predictions,
            ranking_prediction_stats,
        ) = PredictionHelper.get_ranking_predictions(sorted_matches, match_predictions)
        predictions_dict = {
            "match_predictions": match_predictions,
            "match_prediction_stats": match_prediction_stats,
            "stat_mean_vars": stat_mean_vars,
            "ranking_predictions": ranking_predictions,
            "ranking_prediction_stats": ranking_prediction_stats,
        }
    event_insights = EventInsightsHelper.calculate_event_insights(
        event.matches, event.year
    )
    event_details = EventDetails(
        id=event_key,
        matchstats=matchstats_dict,
        predictions=predictions_dict,
        insights=event_insights,
    )
    EventDetailsManipulator.createOrUpdate(event_details)
    template_values = {
        "matchstats_dict": matchstats_dict,
    }
    if (
        "X-Appengine-Taskname" not in request.headers
    ):  # Only write out if not in taskqueue
        return make_response(
            render_template("math/event_matchstats_do.html", **template_values)
        )
    return make_response("")
@blueprint.route("/tasks/math/enqueue/eventteam_update/<when>")
def enqueue_eventteam_update(when: str) -> str:
    """
    Enqueues EventTeam index updates for either every event or all events
    in a given year.

    Args:
        when: Either the literal string "all" or a year (e.g. "2020").
    """
    if when == "all":
        event_keys = Event.query().fetch(10000, keys_only=True)
    else:
        # Guard against non-numeric values so a bad URL 404s instead of
        # raising an uncaught ValueError from int() (which would 500)
        if not when.isdigit():
            abort(404)
        event_keys = Event.query(Event.year == int(when)).fetch(10000, keys_only=True)
    for event_key in event_keys:
        taskqueue.add(
            url="/tasks/math/do/eventteam_update/" + event_key.id(), method="GET"
        )
    template_values = {
        "event_keys": event_keys,
    }
    return render_template("math/eventteam_update_enqueue.html", **template_values)
@blueprint.route("/tasks/math/do/eventteam_update/<event_key>")
def update_eventteams(event_key: EventKey) -> str:
    """
    Task that updates the EventTeam index for an Event.
    Can only update or delete EventTeams for unregistered teams.
    ^^^ Does it actually do this? Eugene -- 2013/07/30
    """
    _, event_teams, et_keys_to_del = EventTeamUpdater.update(event_key)
    if event_teams:
        # Drop EventTeams whose referenced Team no longer exists
        event_teams = list(filter(lambda et: et.team.get() is not None, event_teams))
        event_teams = EventTeamManipulator.createOrUpdate(event_teams)
    if et_keys_to_del:
        EventTeamManipulator.delete_keys(et_keys_to_del)
    template_values = {
        "event_teams": event_teams,
        "deleted_event_teams_keys": et_keys_to_del,
    }
    return render_template("math/eventteam_update_do.html", **template_values)
| mit | 44be528b1106c023fee2bd9fd9e5f933 | 34.218845 | 88 | 0.661604 | 3.738948 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/models/district_team.py | 1 | 1730 | from typing import Set
from google.appengine.ext import ndb
from backend.common.models.cached_model import CachedModel
from backend.common.models.district import District
from backend.common.models.keys import DistrictKey, DistrictTeamKey, TeamKey, Year
from backend.common.models.team import Team
class DistrictTeam(CachedModel):
    """
    DistrictTeam represents the "home district" for a team in a year
    key_name is like <year><district_short>_<team_key> (e.g. 2015ne_frc1124)
    """

    team = ndb.KeyProperty(kind=Team)
    year: Year = ndb.IntegerProperty()
    district_key = ndb.KeyProperty(kind=District)

    created = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
    updated = ndb.DateTimeProperty(auto_now=True, indexed=False)

    _mutable_attrs: Set[str] = {
        "district_key",  # for migrations
    }

    def __init__(self, *args, **kw):
        # store set of affected references referenced keys for cache clearing
        # keys must be model properties
        self._affected_references = {
            "district_key": set(),
            "team": set(),
            "year": set(),
        }
        super(DistrictTeam, self).__init__(*args, **kw)

    @property
    def key_name(self) -> DistrictTeamKey:
        """The string id this model is stored under."""
        return self.render_key_name(self.district_key.id(), self.team.id())

    @classmethod
    def validate_key_name(cls, key: str) -> bool:
        """Checks that `key` looks like <district_key>_<team_key>."""
        split = key.split("_")
        return (
            len(split) == 2
            and District.validate_key_name(split[0])
            and Team.validate_key_name(split[1])
        )

    @classmethod
    def render_key_name(
        cls, district_key: DistrictKey, team_key: TeamKey
    ) -> DistrictTeamKey:
        """Builds the canonical key name for a (district, team) pair."""
        # Fix: first parameter was previously named `self` despite the
        # @classmethod decorator; renamed to `cls` (call sites unaffected).
        return "{}_{}".format(district_key, team_key)
| mit | 1ec0bf519879294bfe1b9018c3a07fea | 31.641509 | 82 | 0.642775 | 3.596674 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/helpers/webcast_online_helper.py | 1 | 9187 | import json
import logging
from typing import Any, Generator, List, Optional
from google.appengine.api import urlfetch
from google.appengine.ext import ndb
from backend.common.consts.webcast_status import WebcastStatus
from backend.common.consts.webcast_type import WebcastType
from backend.common.memcache import MemcacheClient
from backend.common.models.webcast import Webcast
from backend.common.sitevars.google_api_secret import GoogleApiSecret
from backend.common.sitevars.twitch_secrets import TwitchSecrets
class WebcastOnlineHelper:
    """Annotates Webcast dicts in place with online status, stream title, and
    viewer count by querying each provider's API. Results are memcached."""

    @classmethod
    @ndb.toplevel
    def add_online_status(cls, webcasts: List[Webcast]) -> Generator[Any, Any, None]:
        """Resolves online status for all `webcasts`, fetching in parallel."""
        for webcast in webcasts:
            yield cls.add_online_status_async(webcast)
    @classmethod
    @ndb.tasklet
    def add_online_status_async(cls, webcast: Webcast) -> Generator[Any, Any, None]:
        """Mutates `webcast` with status/stream_title/viewer_count.

        Checks memcache first; on a miss, dispatches to the provider-specific
        helper and caches the annotated webcast for 5 minutes.
        """
        memcache = MemcacheClient.get()
        memcache_key = "webcast_status:{}:{}:{}".format(
            webcast["type"], webcast.get("channel"), webcast.get("file")
        ).encode()
        cached_webcast: Optional[Webcast] = memcache.get(memcache_key)
        if cached_webcast:
            # Serve from cache; only copy over fields that were cached
            if "status" in cached_webcast:
                webcast["status"] = cached_webcast.get("status", WebcastStatus.UNKNOWN)
            if "stream_title" in cached_webcast:
                webcast["stream_title"] = cached_webcast.get("stream_title")
            if "viewer_count" in cached_webcast:
                webcast["viewer_count"] = cached_webcast.get("viewer_count")
            return
        # Cache miss: default everything, then ask the provider's API
        webcast["status"] = WebcastStatus.UNKNOWN
        webcast["stream_title"] = None
        webcast["viewer_count"] = None
        if webcast["type"] == WebcastType.TWITCH:
            yield cls._add_twitch_status_async(webcast)
        elif webcast["type"] == WebcastType.USTREAM:
            yield cls._add_ustream_status_async(webcast)
        elif webcast["type"] == WebcastType.YOUTUBE:
            yield cls._add_youtube_status_async(webcast)
        # Livestream charges for their API. Go figure.
        # elif webcast['type'] == 'livestream':
        #     yield cls._add_livestream_status_async(webcast)
        memcache.set(memcache_key, webcast, 60 * 5)  # cache for 5 minutes
    @classmethod
    @ndb.tasklet
    def _add_twitch_status_async(cls, webcast: Webcast) -> Generator[Any, Any, None]:
        """Queries the Twitch Helix API (OAuth client-credentials flow) for
        the channel's live status, title, and viewer count."""
        client_id = TwitchSecrets.client_id()
        client_secret = TwitchSecrets.client_secret()
        if client_id and client_secret:
            # Get auth token
            auth_url = "https://id.twitch.tv/oauth2/token?client_id={}&client_secret={}&grant_type=client_credentials".format(
                client_id, client_secret
            )
            try:
                rpc = urlfetch.create_rpc()
                result = yield urlfetch.make_fetch_call(rpc, auth_url, method="POST")
            except Exception as e:
                logging.error("URLFetch failed when getting Twitch auth token.")
                logging.error(e)
                raise ndb.Return(None)
            if result.status_code == 200:
                response = json.loads(result.content)
                token = response["access_token"]
            else:
                logging.warning(
                    "Twitch auth failed with status code: {}".format(result.status_code)
                )
                logging.warning(result.content)
                raise ndb.Return(None)
            # Get webcast status
            status_url = "https://api.twitch.tv/helix/streams?user_login={}".format(
                webcast["channel"]
            )
            try:
                rpc = urlfetch.create_rpc()
                result = yield urlfetch.make_fetch_call(
                    rpc,
                    status_url,
                    headers={
                        "Authorization": "Bearer {}".format(token),
                        "Client-ID": client_id,
                    },
                )
            except Exception as e:
                logging.exception("URLFetch failed for: {}".format(status_url))
                logging.error(e)
                return None
        else:
            logging.warning("Must have Twitch Client ID & Secret")
            return None
        # An empty `data` array means the channel is not currently live
        if result.status_code == 200:
            response = json.loads(result.content)
            if response["data"]:
                webcast["status"] = WebcastStatus.ONLINE
                webcast["stream_title"] = response["data"][0]["title"]
                webcast["viewer_count"] = response["data"][0]["viewer_count"]
            else:
                webcast["status"] = WebcastStatus.OFFLINE
        else:
            logging.warning(
                "Twitch status failed with code: {}".format(result.status_code)
            )
            logging.warning(result.content)
            return None
    @classmethod
    @ndb.tasklet
    def _add_ustream_status_async(cls, webcast: Webcast) -> Generator[Any, Any, None]:
        """Queries the Ustream channels API for the channel's live status."""
        url = "https://api.ustream.tv/channels/{}.json".format(webcast["channel"])
        try:
            rpc = urlfetch.create_rpc()
            result = yield urlfetch.make_fetch_call(rpc, url)
        except Exception:
            logging.exception("URLFetch failed for: {}".format(url))
            return None
        if result.status_code == 200:
            response = json.loads(result.content)
            if response["channel"]:
                webcast["status"] = (
                    WebcastStatus.ONLINE
                    if response["channel"]["status"] == "live"
                    else WebcastStatus.OFFLINE
                )
                webcast["stream_title"] = response["channel"]["title"]
            else:
                webcast["status"] = WebcastStatus.OFFLINE
        else:
            logging.warning(
                "Ustream status failed with code: {}".format(result.status_code)
            )
            logging.warning(result.content)
            return None
    @classmethod
    @ndb.tasklet
    def _add_youtube_status_async(cls, webcast: Webcast) -> Generator[Any, Any, None]:
        """Queries the YouTube Data API v3 for the video's live status."""
        api_key = GoogleApiSecret.secret_key()
        if api_key:
            url = "https://www.googleapis.com/youtube/v3/videos?part=snippet&id={}&key={}".format(
                webcast["channel"], api_key
            )
            try:
                rpc = urlfetch.create_rpc()
                result = yield urlfetch.make_fetch_call(rpc, url)
            except Exception:
                logging.exception("URLFetch failed for: {}".format(url))
                return None
        else:
            logging.warning("Must have Google API key")
            return None
        if result.status_code == 200:
            response = json.loads(result.content)
            if response["items"]:
                webcast["status"] = (
                    WebcastStatus.ONLINE
                    if response["items"][0]["snippet"]["liveBroadcastContent"] == "live"
                    else WebcastStatus.OFFLINE
                )
                webcast["stream_title"] = response["items"][0]["snippet"]["title"]
            else:
                webcast["status"] = WebcastStatus.OFFLINE
        else:
            logging.warning(
                "YouTube status failed with code: {}".format(result.status_code)
            )
            logging.warning(result.content)
            return None
    # Disabled implementation kept below as a string literal (see the
    # "Livestream charges for their API" note above).
    """
    @classmethod
    @ndb.tasklet
    def _add_livestream_status_async(
        cls, webcast: Webcast
    ) -> Generator[Any, Any, None]:
        livestream_secrets = Sitevar.get_or_insert("livestream.secrets")
        api_key = None
        if livestream_secrets and livestream_secrets.contents:
            api_key = livestream_secrets.contents.get("api_key")
        if api_key:
            url = "https://livestreamapis.com/v2/accounts/{}/events/{}".format(
                webcast["channel"], webcast["file"]
            )
            try:
                base64string = base64.encodebytes("{}:".format(api_key).encode())
                headers = {"Authorization": "Basic {}".format(base64string)}
                rpc = urlfetch.create_rpc()
                result = yield urlfetch.make_fetch_call(rpc, url, headers=headers)
            except Exception:
                logging.exception("URLFetch failed for: {}".format(url))
                return None
        else:
            logging.warning("Must have Livestream API key")
            return None
        if result.status_code == 200:
            response = json.loads(result.content)
            if response["items"]:
                webcast["status"] = (
                    WebcastStatus.ONLINE
                    if response["items"][0]["snippet"]["liveBroadcastContent"] == "live"
                    else WebcastStatus.OFFLINE
                )
                webcast["stream_title"] = response["items"][0]["snippet"]["title"]
            else:
                webcast["status"] = WebcastStatus.OFFLINE
        else:
            logging.warning(
                "Livestream status failed with code: {}".format(result.status_code)
            )
            logging.warning(result.content)
            return None
    """
| mit | e4ed06458d91333ad308c9f71ce50ac2 | 38.429185 | 126 | 0.553282 | 4.307079 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/models/team_admin_access.py | 1 | 1112 | from google.appengine.ext import ndb
from backend.common.models.account import Account
from backend.common.models.team import Team
class TeamAdminAccess(ndb.Model):
    """
    This class represents a pre-issued token that the TBA admins can generate
    and grant to whitelisted accounts. Accounts linked to these tokens will be
    granted moderator privileges for that team's media
    """

    # This is the team number that this code is valid for
    team_number = ndb.IntegerProperty()
    year = ndb.IntegerProperty()
    access_code = ndb.StringProperty()
    expiration = ndb.DateTimeProperty()
    # Account that has redeemed this code (unset until linked)
    account = ndb.KeyProperty(kind=Account)
    created = ndb.DateTimeProperty(auto_now_add=True, indexed=False)
    updated = ndb.DateTimeProperty(auto_now=True, indexed=False)
    @property
    def key_name(self):
        """Canonical datastore id for this access code: "frc<team>_<year>"."""
        return self.render_key_name(self.team_number, self.year)
    @property
    def team_key(self):
        """ndb.Key of the Team this access code applies to."""
        return ndb.Key(Team, "frc{}".format(self.team_number))
    @classmethod
    def render_key_name(cls, team_number, year):
        """Builds the key name for a (team_number, year) pair."""
        return "frc{}_{}".format(team_number, year)
| mit | a0418e071e174b1ad8ba2221690d1a78 | 30.771429 | 78 | 0.706835 | 3.901754 | false | false | false | false |
the-blue-alliance/the-blue-alliance | old_py2/controllers/district_controller.py | 1 | 6347 | import datetime
import logging
import os
from google.appengine.ext import ndb
from controllers.base_controller import CacheableHandler
from consts.event_type import EventType
from database.district_query import DistrictQuery, DistrictHistoryQuery, DistrictsInYearQuery
from database.event_query import DistrictEventsQuery
from database.team_query import DistrictTeamsQuery, EventTeamsQuery
from helpers.event_helper import EventHelper
from helpers.event_team_status_helper import EventTeamStatusHelper
from helpers.team_helper import TeamHelper
from models.event import Event
from models.event_team import EventTeam
from models.team import Team
from template_engine import jinja2_engine
class DistrictDetail(CacheableHandler):
    """Renders the district details page for a district/year.

    NOTE(review): legacy Python 2 handler (tuple-parameter lambda and integer
    division below are Python 2 syntax/semantics); do not run under Python 3.
    """
    CACHE_KEY_FORMAT = "district_detail_{}_{}_{}"  # (district_abbrev, year, explicit_year)
    CACHE_VERSION = 2
    def __init__(self, *args, **kw):
        super(DistrictDetail, self).__init__(*args, **kw)
        # Cache rendered pages for 15 minutes
        self._cache_expiration = 60 * 15
    def get(self, district_abbrev, year=None, explicit_year=False):
        if year == '':
            return self.redirect("/")
        if year:
            if not year.isdigit():
                self.abort(404)
            year = int(year)
            explicit_year = True
        else:
            # No year in the URL: default to the current season
            year = datetime.datetime.now().year
            explicit_year = False
        self._partial_cache_key = self.CACHE_KEY_FORMAT.format(district_abbrev, year, explicit_year)
        super(DistrictDetail, self).get(district_abbrev, year, explicit_year)
    def _render(self, district_abbrev, year=None, explicit_year=False):
        district = DistrictQuery('{}{}'.format(year, district_abbrev)).fetch()
        if not district:
            self.abort(404)
        # Start all datastore fetches asynchronously up front
        events_future = DistrictEventsQuery(district.key_name).fetch_async()
        # needed for district teams
        district_teams_future = DistrictTeamsQuery(district.key_name).fetch_async()
        # needed for valid_years
        history_future = DistrictHistoryQuery(district.abbreviation).fetch_async()
        # needed for valid_districts
        districts_in_year_future = DistrictsInYearQuery(district.year).fetch_async()
        # needed for active team statuses
        live_events = []
        if year == datetime.datetime.now().year:  # Only show active teams for current year
            live_events = EventHelper.week_events()
        live_eventteams_futures = []
        for event in live_events:
            live_eventteams_futures.append(EventTeamsQuery(event.key_name).fetch_async())
        events = events_future.get_result()
        EventHelper.sort_events(events)
        events_by_key = {}
        for event in events:
            events_by_key[event.key.id()] = event
        week_events = EventHelper.group_by_week(events)
        valid_districts = set()
        districts_in_year = districts_in_year_future.get_result()
        for dist in districts_in_year:
            valid_districts.add((dist.display_name, dist.abbreviation))
        # Python 2 tuple-parameter lambda: sort districts by display name
        valid_districts = sorted(valid_districts, key=lambda (name, _): name)
        teams = TeamHelper.sort_teams(district_teams_future.get_result())
        team_keys = set([t.key.id() for t in teams])
        num_teams = len(teams)
        # Split the team list into two display columns (Py2 integer division)
        middle_value = num_teams / 2
        if num_teams % 2 != 0:
            middle_value += 1
        teams_a, teams_b = teams[:middle_value], teams[middle_value:]
        # Currently Competing Team Status
        event_team_keys = []
        for event, teams_future in zip(live_events, live_eventteams_futures):
            for team in teams_future.get_result():
                if team.key.id() in team_keys:
                    event_team_keys.append(ndb.Key(EventTeam, '{}_{}'.format(event.key.id(), team.key.id())))  # Should be in context cache
        ndb.get_multi(event_team_keys)  # Warms context cache
        live_events_with_teams = []
        for event, teams_future in zip(live_events, live_eventteams_futures):
            teams_and_statuses = []
            has_teams = False
            for team in teams_future.get_result():
                if team.key.id() in team_keys:
                    has_teams = True
                    event_team = EventTeam.get_by_id('{}_{}'.format(event.key.id(), team.key.id()))  # Should be in context cache
                    if event_team is None:
                        logging.info("No EventTeam for {}_{}".format(event.key.id(), team.key.id()))
                        continue
                    status_str = {
                        'alliance': EventTeamStatusHelper.generate_team_at_event_alliance_status_string(team.key.id(), event_team.status),
                        'playoff': EventTeamStatusHelper.generate_team_at_event_playoff_status_string(team.key.id(), event_team.status),
                    }
                    teams_and_statuses.append((
                        team,
                        event_team.status,
                        status_str
                    ))
            if has_teams:
                teams_and_statuses.sort(key=lambda x: x[0].team_number)
                live_events_with_teams.append((event, teams_and_statuses))
        # Stable sorts: name, then start date, then end date (last wins)
        live_events_with_teams.sort(key=lambda x: x[0].name)
        live_events_with_teams.sort(key=lambda x: EventHelper.start_date_or_distant_future(x[0]))
        live_events_with_teams.sort(key=lambda x: EventHelper.end_date_or_distant_future(x[0]))
        # Get valid years
        district_history = history_future.get_result()
        valid_years = map(lambda d: d.year, district_history)
        valid_years = sorted(valid_years)
        self.template_values.update({
            'explicit_year': explicit_year,
            'year': year,
            'valid_years': valid_years,
            'valid_districts': valid_districts,
            'district_name': district.display_name,
            'district_abbrev': district_abbrev,
            'week_events': week_events,
            'events_by_key': events_by_key,
            'rankings': district.rankings,
            'advancement': district.advancement,
            'num_teams': num_teams,
            'teams_a': teams_a,
            'teams_b': teams_b,
            'live_events_with_teams': live_events_with_teams,
        })
        return jinja2_engine.render('district_details.html', self.template_values)
| mit | 55f2ef6286d89ddd20816e7be5dd6f73 | 41.033113 | 139 | 0.61147 | 3.839685 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/web/handlers/suggestions/tests/suggest_match_video_review_controller_test.py | 1 | 10045 | import re
from datetime import datetime
from typing import List
from urllib.parse import urlparse
import pytest
from bs4 import BeautifulSoup
from google.appengine.ext import ndb
from werkzeug.test import Client
from backend.common.consts.account_permission import AccountPermission
from backend.common.consts.event_type import EventType
from backend.common.consts.suggestion_state import SuggestionState
from backend.common.models.event import Event
from backend.common.models.match import Match
from backend.common.models.suggestion import Suggestion
from backend.common.suggestions.suggestion_creator import (
SuggestionCreationStatus,
SuggestionCreator,
)
@pytest.fixture(autouse=True)
def createMatchAndEvent(ndb_stub):
    """Seeds the datastore with event 2016necmp and two finals matches
    (2016necmp_f1m1 and 2016necmp_f1m2) for every test in this module."""
    event = Event(
        id="2016necmp",
        name="New England District Championship",
        event_type_enum=EventType.DISTRICT_CMP,
        short_name="New England",
        event_short="necmp",
        year=2016,
        end_date=datetime(2016, 3, 27),
        official=False,
        city="Hartford",
        state_prov="CT",
        country="USA",
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        start_date=datetime(2016, 3, 24),
        webcast_json="",
        website="http://www.firstsv.org",
    )
    event.put()
    match = Match(
        id="2016necmp_f1m1",
        event=ndb.Key(Event, "2016necmp"),
        year=2016,
        comp_level="f",
        set_number=1,
        match_number=1,
        team_key_names=[
            "frc846",
            "frc2135",
            "frc971",
            "frc254",
            "frc1678",
            "frc973",
        ],
        time=datetime.fromtimestamp(1409527874),
        time_string="4:31 PM",
        youtube_videos=["JbwUzl3W9ug"],
        tba_videos=[],
        alliances_json='{\
            "blue": {\
                "score": 270,\
                "teams": [\
                "frc846",\
                "frc2135",\
                "frc971"]},\
            "red": {\
                "score": 310,\
                "teams": [\
                "frc254",\
                "frc1678",\
                "frc973"]}}',
        score_breakdown_json='{\
            "blue": {\
                "auto": 70,\
                "teleop_goal+foul": 40,\
                "assist": 120,\
                "truss+catch": 40\
            },"red": {\
                "auto": 70,\
                "teleop_goal+foul": 50,\
                "assist": 150,\
                "truss+catch": 40}}',
    )
    match.put()
    match2 = Match(
        id="2016necmp_f1m2",
        event=ndb.Key(Event, "2016necmp"),
        year=2016,
        comp_level="f",
        set_number=1,
        match_number=2,
        team_key_names=[
            "frc846",
            "frc2135",
            "frc971",
            "frc254",
            "frc1678",
            "frc973",
        ],
        time=datetime.fromtimestamp(1409527874),
        time_string="4:31 PM",
        youtube_videos=["JbwUzl3W9ug"],
        tba_videos=[],
        alliances_json='{\
            "blue": {\
                "score": 270,\
                "teams": [\
                "frc846",\
                "frc2135",\
                "frc971"]},\
            "red": {\
                "score": 310,\
                "teams": [\
                "frc254",\
                "frc1678",\
                "frc973"]}}',
        score_breakdown_json='{\
            "blue": {\
                "auto": 70,\
                "teleop_goal+foul": 40,\
                "assist": 120,\
                "truss+catch": 40\
            },"red": {\
                "auto": 70,\
                "teleop_goal+foul": 50,\
                "assist": 150,\
                "truss+catch": 40}}',
    )
    match2.put()
@pytest.fixture
def login_user_with_permission(login_user):
    # Grants the media-review permission required by the review endpoints
    login_user.permissions = [AccountPermission.REVIEW_MEDIA]
    return login_user
def get_suggestion_queue(web_client: Client) -> List[str]:
    """Fetches the review page and returns the suggestion ids in the queue.

    Also sanity-checks that each rendered suggestion has its accept button,
    reject button, and match-key input.
    """
    response = web_client.get("/suggest/match/video/review")
    assert response.status_code == 200
    soup = BeautifulSoup(response.data, "html.parser")
    review_form = soup.find(id="review_videos")
    assert review_form is not None

    queue = []
    for item in review_form.find_all(class_="suggestion-item"):
        buttons = {}
        for action in ("accept", "reject"):
            buttons[action] = item.find(
                "input",
                attrs={
                    "name": re.compile("accept_reject-.*"),
                    "value": re.compile(action + "::.*"),
                },
            )
            assert buttons[action] is not None
        match_key_input = item.find("input", attrs={"name": re.compile("key-.*")})
        assert match_key_input is not None
        # The button value is "accept::<suggestion_id>"
        queue.append(buttons["accept"]["value"].split("::")[1])
    return queue
def createSuggestion(logged_in_user) -> str:
    """Creates a pending match-video suggestion and returns its key name."""
    video_key = "H-54KMwMKY0"
    match_key = "2016necmp_f1m1"
    status = SuggestionCreator.createMatchVideoYouTubeSuggestion(
        logged_in_user.account_key, video_key, match_key
    )
    assert status == SuggestionCreationStatus.SUCCESS
    return Suggestion.render_media_key_name(
        2016, "match", match_key, "youtube", video_key
    )
def test_login_redirect(web_client: Client) -> None:
    # Anonymous users should be redirected to the login page
    response = web_client.get("/suggest/match/video/review")
    assert response.status_code == 302
    redirect_path = urlparse(response.headers["Location"]).path
    assert redirect_path == "/account/login"
def test_no_permissions(login_user, web_client: Client) -> None:
    # Logged in, but without REVIEW_MEDIA permission: expect 401
    response = web_client.get("/suggest/match/video/review")
    assert response.status_code == 401
def test_nothing_to_review(login_user_with_permission, web_client: Client) -> None:
    # With no pending suggestions, the review queue should render empty
    queue = get_suggestion_queue(web_client)
    assert queue == []
def test_accept_suggestion(
    login_user_with_permission,
    ndb_stub,
    web_client: Client,
    taskqueue_stub,
) -> None:
    """Accepting a suggestion marks it reviewed and attaches the video."""
    suggestion_id = createSuggestion(login_user_with_permission)
    assert get_suggestion_queue(web_client) == [suggestion_id]

    form_data = {f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}"}
    response = web_client.post(
        "/suggest/match/video/review",
        data=form_data,
        follow_redirects=True,
    )
    assert response.status_code == 200

    # The suggestion should now be marked as reviewed/accepted
    suggestion = Suggestion.get_by_id(suggestion_id)
    assert suggestion is not None
    assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED

    # The accepted video should be attached to the match
    match = Match.get_by_id("2016necmp_f1m1")
    assert match is not None
    assert match.youtube_videos is not None
    assert "H-54KMwMKY0" in match.youtube_videos
def test_accept_new_key(
    login_user_with_permission,
    ndb_stub,
    web_client: Client,
    taskqueue_stub,
) -> None:
    """Accepting with an overridden key attaches the video to that match only."""
    suggestion_id = createSuggestion(login_user_with_permission)
    assert get_suggestion_queue(web_client) == [suggestion_id]

    form_data = {
        f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
        f"key-{suggestion_id}": "2016necmp_f1m2",
    }
    response = web_client.post(
        "/suggest/match/video/review",
        data=form_data,
        follow_redirects=True,
    )
    assert response.status_code == 200

    # The suggestion should now be marked as reviewed/accepted
    suggestion = Suggestion.get_by_id(suggestion_id)
    assert suggestion is not None
    assert suggestion.review_state == SuggestionState.REVIEW_ACCEPTED

    # The video should be attached to the overridden match...
    match = Match.get_by_id("2016necmp_f1m2")
    assert match is not None
    assert match.youtube_videos is not None
    assert "H-54KMwMKY0" in match.youtube_videos

    # ...and not to the originally suggested one
    match = Match.get_by_id("2016necmp_f1m1")
    assert match is not None
    assert match.youtube_videos is not None
    assert "H-54KMwMKY0" not in match.youtube_videos
def test_accept_bad_key(
    login_user_with_permission, ndb_stub, web_client: Client
) -> None:
    """Overriding with a nonexistent match key leaves the suggestion pending."""
    suggestion_id = createSuggestion(login_user_with_permission)
    assert get_suggestion_queue(web_client) == [suggestion_id]

    form_data = {
        f"accept_reject-{suggestion_id}": f"accept::{suggestion_id}",
        f"key-{suggestion_id}": "2016necmp_f1m3",  # this match doesn't exist
    }
    response = web_client.post(
        "/suggest/match/video/review",
        data=form_data,
        follow_redirects=True,
    )
    assert response.status_code == 200

    # The suggestion should still be pending review
    suggestion = Suggestion.get_by_id(suggestion_id)
    assert suggestion is not None
    assert suggestion.review_state == SuggestionState.REVIEW_PENDING

    # The video should not have been attached to the original match
    match = Match.get_by_id("2016necmp_f1m1")
    assert match is not None
    assert match.youtube_videos is not None
    assert "H-54KMwMKY0" not in match.youtube_videos
def test_reject_suggestion(
    login_user_with_permission, ndb_stub, web_client: Client
) -> None:
    """Rejecting a suggestion marks it rejected and attaches nothing."""
    suggestion_id = createSuggestion(login_user_with_permission)
    assert get_suggestion_queue(web_client) == [suggestion_id]

    response = web_client.post(
        "/suggest/match/video/review",
        data={f"accept_reject-{suggestion_id}": f"reject::{suggestion_id}"},
        follow_redirects=True,
    )
    assert response.status_code == 200

    # The suggestion should now be marked as reviewed/rejected
    suggestion = Suggestion.get_by_id(suggestion_id)
    assert suggestion is not None
    assert suggestion.review_state == SuggestionState.REVIEW_REJECTED

    # The video should not have been attached to the match
    match = Match.get_by_id("2016necmp_f1m1")
    assert match is not None
    assert "H-54KMwMKY0" not in match.youtube_videos
| mit | 3449b0631d65aebcf5718f32d487d2e8 | 30.003086 | 83 | 0.587456 | 3.61461 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/web/handlers/suggestions/tests/suggest_event_webcast_controller_test.py | 1 | 6166 | from datetime import datetime
from typing import cast, List
from urllib.parse import urlparse
import pytest
from bs4 import BeautifulSoup
from werkzeug.test import Client
from backend.common.consts.event_type import EventType
from backend.common.consts.suggestion_state import SuggestionState
from backend.common.consts.webcast_type import WebcastType
from backend.common.models.event import Event
from backend.common.models.suggestion import Suggestion
from backend.common.models.webcast import Webcast
from backend.web.handlers.conftest import CapturedTemplate
@pytest.fixture(autouse=True)
def createEvent(ndb_stub) -> None:
    """Seed the datastore with the 2016necmp event used by every test."""
    Event(
        id="2016necmp",
        year=2016,
        name="New England District Championship",
        short_name="New England",
        event_short="necmp",
        event_type_enum=EventType.DISTRICT_CMP,
        official=False,
        start_date=datetime(2016, 3, 24),
        end_date=datetime(2016, 3, 27),
        city="Hartford",
        state_prov="CT",
        country="USA",
        venue="Some Venue",
        venue_address="Some Venue, Hartford, CT, USA",
        timezone_id="America/New_York",
        webcast_json="",
        website="http://www.firstsv.org",
    ).put()
def assert_template_status(
    captured_templates: List[CapturedTemplate], status: str
) -> None:
    """Check that the webcast suggestion template rendered with `status`."""
    template, context = captured_templates[0][0], captured_templates[0][1]
    assert template.name == "suggestions/suggest_event_webcast.html"
    assert context["status"] == status
def test_login_redirect(web_client: Client) -> None:
    """Anonymous users are redirected to the login page."""
    resp = web_client.get("/suggest/event/webcast?event_key=2016necmp")
    assert resp.status_code == 302
    assert urlparse(resp.headers["Location"]).path == "/account/login"
def test_get_form_bad_event(login_user, web_client: Client) -> None:
    """Requesting the form for an unknown event 404s."""
    resp = web_client.get("/suggest/event/webcast?event_key=2016asdf")
    assert resp.status_code == 404
def test_get_form(login_user, web_client: Client) -> None:
    """The suggestion form renders with its hidden fields and inputs."""
    resp = web_client.get("/suggest/event/webcast?event_key=2016necmp")
    assert resp.status_code == 200

    soup = BeautifulSoup(resp.data, "html.parser")
    form = soup.find("form", id="suggest_webcast")
    assert form is not None
    assert form["action"] == "/suggest/event/webcast"
    assert form["method"] == "post"

    # CSRF token must be present as a hidden input
    csrf = form.find(attrs={"name": "csrf_token"})
    assert csrf is not None
    assert csrf["type"] == "hidden"
    assert csrf["value"] is not None

    # The event key is carried along as a hidden input
    event_key = form.find(attrs={"name": "event_key"})
    assert event_key is not None
    assert event_key["type"] == "hidden"
    assert event_key["value"] == "2016necmp"

    for field_name in ("webcast_url", "webcast_date"):
        assert form.find(attrs={"name": field_name}) is not None
    assert form.find("button", type="submit") is not None
def test_submit_no_event(login_user, ndb_stub, web_client: Client) -> None:
    """Posting without an event key 404s and writes nothing."""
    resp = web_client.post("/suggest/event/webcast", data={}, follow_redirects=True)
    assert resp.status_code == 404

    # No suggestion should have been created
    assert Suggestion.query().fetch() == []
def test_submit_empty_form(
    login_user,
    ndb_stub,
    web_client: Client,
    captured_templates: List[CapturedTemplate],
) -> None:
    """Submitting with no webcast URL re-renders with a blank-webcast error."""
    resp = web_client.post(
        "/suggest/event/webcast", data={"event_key": "2016necmp"}, follow_redirects=True
    )
    assert resp.status_code == 200
    assert_template_status(captured_templates, "blank_webcast")

    # The corresponding alert dialog is shown
    page = BeautifulSoup(resp.data, "html.parser")
    assert page.find(id="blank_webcast-alert") is not None

    # No suggestion should have been created
    assert Suggestion.query().fetch() == []
def test_submit_bad_url(
    login_user,
    ndb_stub,
    web_client: Client,
    captured_templates: List[CapturedTemplate],
) -> None:
    """A non-URL value re-renders the form with an invalid-URL error."""
    resp = web_client.post(
        "/suggest/event/webcast",
        data={"event_key": "2016necmp", "webcast_url": "The Blue Alliance"},
        follow_redirects=True,
    )
    assert resp.status_code == 200
    assert_template_status(captured_templates, "invalid_url")

    # The corresponding alert dialog is shown
    page = BeautifulSoup(resp.data, "html.parser")
    assert page.find(id="invalid_url-alert") is not None

    # No suggestion should have been created
    assert Suggestion.query().fetch() == []
def test_submit_tba_url(
    login_user,
    ndb_stub,
    web_client: Client,
    captured_templates: List[CapturedTemplate],
) -> None:
    """A TBA URL is not a valid webcast and is rejected as invalid."""
    resp = web_client.post(
        "/suggest/event/webcast",
        data={"event_key": "2016necmp", "webcast_url": "http://thebluealliance.com"},
        follow_redirects=True,
    )
    assert resp.status_code == 200
    assert_template_status(captured_templates, "invalid_url")

    # The corresponding alert dialog is shown
    page = BeautifulSoup(resp.data, "html.parser")
    assert page.find(id="invalid_url-alert") is not None

    # No suggestion should have been created
    assert Suggestion.query().fetch() == []
def test_submit_webcast(
    login_user,
    ndb_stub,
    web_client: Client,
    captured_templates: List[CapturedTemplate],
) -> None:
    """A valid Twitch URL creates a pending suggestion with a parsed webcast."""
    form_data = {
        "event_key": "2016necmp",
        "webcast_url": "https://twitch.tv/frcgamesense",
        "webcast_date": "",
    }
    resp = web_client.post(
        "/suggest/event/webcast",
        data=form_data,
        follow_redirects=True,
    )
    assert resp.status_code == 200
    assert_template_status(captured_templates, "success")

    # The success dialog is shown
    page = BeautifulSoup(resp.data, "html.parser")
    assert page.find(id="success-alert") is not None

    # A pending suggestion is written with the parsed webcast details
    suggestion = cast(Suggestion, Suggestion.query().fetch()[0])
    assert suggestion is not None
    assert suggestion.review_state == SuggestionState.REVIEW_PENDING
    assert suggestion.target_key == "2016necmp"
    assert suggestion.contents["webcast_url"] == "https://twitch.tv/frcgamesense"
    assert suggestion.contents.get("webcast_dict") == Webcast(
        type=WebcastType.TWITCH, channel="frcgamesense"
    )
| mit | 1e1e229e6ea49648eac519dbb1ab23cb | 31.624339 | 88 | 0.670289 | 3.595335 | false | true | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/helpers/match_tiebreakers.py | 1 | 13633 | from __future__ import annotations
from typing import Dict, List, Optional, Tuple
from pyre_extensions import none_throws
from backend.common.consts.alliance_color import (
AllianceColor,
TMatchWinner,
)
from backend.common.consts.comp_level import CompLevel, ELIM_LEVELS
from backend.common.models.match import Match
# Tuples of (red_tiebreaker, blue_tiebreaker) or None. Higher value wins.
TCriteria = Optional[Tuple[int, int]]
class MatchTiebreakers(object):
    """Year-specific playoff tiebreaker logic.

    Each season's game manual defines an ordered list of tiebreak criteria
    for elimination matches. A criterion is a (red_value, blue_value) tuple
    where the higher value wins, or None when the required score breakdown
    fields are missing (which makes the result indeterminate).
    """

    @classmethod
    def tiebreak_winner(cls, match: Match) -> TMatchWinner:
        """
        Compute elim winner using tiebreakers.

        Returns AllianceColor.RED or AllianceColor.BLUE if some criterion
        breaks the tie, or "" when the tie cannot be broken (no breakdown,
        missing fields, a final that goes to overtime instead, or all
        criteria tied).
        """
        if match.comp_level not in ELIM_LEVELS or match.score_breakdown is None:
            return ""

        breakdown = none_throws(match.score_breakdown)
        if AllianceColor.RED not in breakdown or AllianceColor.BLUE not in breakdown:
            return ""

        red_breakdown = breakdown[AllianceColor.RED]
        blue_breakdown = breakdown[AllianceColor.BLUE]

        # From 2017 on, finals (match 1-3) can't be tiebroken. Only overtime.
        is_regular_final = match.comp_level == CompLevel.F and match.match_number <= 3

        tiebreakers: List[TCriteria] = []
        if match.year == 2016:
            tiebreakers = cls._tiebreak_2016(red_breakdown, blue_breakdown)
        elif match.year in (2017, 2019, 2020, 2022) and not is_regular_final:
            year_tiebreak = {
                2017: cls._tiebreak_2017,
                2019: cls._tiebreak_2019,
                2020: cls._tiebreak_2020,
                2022: cls._tiebreak_2022,
            }[match.year]
            tiebreakers = year_tiebreak(red_breakdown, blue_breakdown)

        for tiebreaker in tiebreakers:
            if tiebreaker is None:
                # Required breakdown data is missing; abort tiebreaking
                return ""
            elif tiebreaker[0] > tiebreaker[1]:
                return AllianceColor.RED
            elif tiebreaker[1] > tiebreaker[0]:
                return AllianceColor.BLUE
        return ""

    @classmethod
    def _criterion(
        cls,
        red_breakdown: Dict,
        blue_breakdown: Dict,
        keys: Tuple[str, ...],
    ) -> TCriteria:
        """Sum `keys` from each alliance's breakdown.

        Returns (red_sum, blue_sum), or None if any key is missing from
        either breakdown.
        """
        if all(k in red_breakdown and k in blue_breakdown for k in keys):
            return (
                sum(red_breakdown[k] for k in keys),
                sum(blue_breakdown[k] for k in keys),
            )
        return None

    @classmethod
    def _tiebreak_2022(
        cls, red_breakdown: Dict, blue_breakdown: Dict
    ) -> List[TCriteria]:
        """2022 tiebreak criteria, in order."""
        return [
            # Cumulative FOUL and TECH FOUL points due to opponent rule violations
            cls._criterion(red_breakdown, blue_breakdown, ("foulPoints",)),
            # Cumulative HANGAR points
            cls._criterion(red_breakdown, blue_breakdown, ("endgamePoints",)),
            # Cumulative AUTO TAXI + CARGO points
            cls._criterion(red_breakdown, blue_breakdown, ("autoPoints",)),
        ]

    @classmethod
    def _tiebreak_2020(
        cls, red_breakdown: Dict, blue_breakdown: Dict
    ) -> List[TCriteria]:
        """2020 tiebreak criteria, in order."""
        return [
            # Cumulative FOUL and TECH FOUL points due to opponent rule violations
            cls._criterion(red_breakdown, blue_breakdown, ("foulPoints",)),
            # Cumulative AUTO points
            cls._criterion(red_breakdown, blue_breakdown, ("autoPoints",)),
            # Cumulative ENDGAME points
            cls._criterion(red_breakdown, blue_breakdown, ("endgamePoints",)),
            # Cumulative TELEOP POWER CELL and CONTROL PANEL points
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("teleopCellPoints", "controlPanelPoints"),
            ),
        ]

    @classmethod
    def _tiebreak_2019(
        cls, red_breakdown: Dict, blue_breakdown: Dict
    ) -> List[TCriteria]:
        """2019 tiebreak criteria, in order."""
        return [
            # Greater number of FOUL points awarded (i.e. the ALLIANCE that played the cleaner MATCH)
            cls._criterion(red_breakdown, blue_breakdown, ("foulPoints",)),
            # Cumulative sum of scored CARGO points
            cls._criterion(red_breakdown, blue_breakdown, ("cargoPoints",)),
            # Cumulative sum of scored HATCH PANEL points
            cls._criterion(red_breakdown, blue_breakdown, ("hatchPanelPoints",)),
            # Cumulative sum of scored HAB CLIMB points
            cls._criterion(red_breakdown, blue_breakdown, ("habClimbPoints",)),
            # Cumulative sum of scored SANDSTORM BONUS points
            cls._criterion(red_breakdown, blue_breakdown, ("sandStormBonusPoints",)),
        ]

    @classmethod
    def _tiebreak_2017(
        cls, red_breakdown: Dict, blue_breakdown: Dict
    ) -> List[TCriteria]:
        """2017 tiebreak criteria, in order."""
        return [
            # Greater number of FOUL points awarded (i.e. the ALLIANCE that played the cleaner MATCH)
            cls._criterion(red_breakdown, blue_breakdown, ("foulPoints",)),
            # Cumulative sum of scored AUTO points
            cls._criterion(red_breakdown, blue_breakdown, ("autoPoints",)),
            # Cumulative ROTOR engagement score (AUTO and TELEOP)
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("autoRotorPoints", "teleopRotorPoints"),
            ),
            # Cumulative TOUCHPAD score
            cls._criterion(red_breakdown, blue_breakdown, ("teleopTakeoffPoints",)),
            # Total accumulated pressure
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("autoFuelPoints", "teleopFuelPoints"),
            ),
        ]

    @classmethod
    def _tiebreak_2016(
        cls, red_breakdown: Dict, blue_breakdown: Dict
    ) -> List[TCriteria]:
        """2016 tiebreak criteria, in order."""
        return [
            # Greater number of FOUL points awarded (i.e. the ALLIANCE that played the cleaner MATCH)
            cls._criterion(red_breakdown, blue_breakdown, ("foulPoints",)),
            # Cumulative sum of BREACH and CAPTURE points
            cls._criterion(
                red_breakdown, blue_breakdown, ("breachPoints", "capturePoints")
            ),
            # Cumulative sum of scored AUTO points
            cls._criterion(red_breakdown, blue_breakdown, ("autoPoints",)),
            # Cumulative sum of scored SCALE and CHALLENGE points
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("teleopScalePoints", "teleopChallengePoints"),
            ),
            # Cumulative sum of scored TOWER GOAL points (High and Low goals from AUTO and TELEOP)
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("autoBoulderPoints", "teleopBoulderPoints"),
            ),
            # Cumulative sum of CROSSED UNDAMAGED DEFENSE points (AUTO and TELEOP)
            cls._criterion(
                red_breakdown,
                blue_breakdown,
                ("autoCrossingPoints", "teleopCrossingPoints"),
            ),
        ]
| mit | 007ad6ee08a2885a0c31d431cbefb8c4 | 35.549598 | 97 | 0.581163 | 3.848955 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/tasks_io/datafeeds/parsers/fms_api/fms_api_event_rankings_parser.py | 1 | 1337 | from typing import Any, Dict, List, Optional
from backend.common.helpers.rankings_helper import RankingsHelper
from backend.common.models.event_ranking import EventRanking
from backend.tasks_io.datafeeds.parsers.json.parser_json import ParserJSON
class FMSAPIEventRankingsParser(ParserJSON[List[EventRanking]]):
    """Parses the FMS API event rankings response into EventRanking dicts."""

    def __init__(self, year: int) -> None:
        self.year = year

    def parse(self, response: Dict[str, Any]) -> Optional[List[EventRanking]]:
        """Convert each team entry in the response into a ranking.

        Returns None when the response contains no rankings.
        """
        rankings = []
        for team in response["Rankings"]:
            # Collect sortOrder1, sortOrder2, ... until one is missing
            sort_orders = []
            index = 1
            while "sortOrder{}".format(index) in team:
                sort_orders.append(team["sortOrder{}".format(index)])
                index += 1
            rankings.append(
                RankingsHelper.build_ranking(
                    self.year,
                    team["rank"],
                    "frc{}".format(team["teamNumber"]),
                    team["wins"],
                    team["losses"],
                    team["ties"],
                    team["qualAverage"],
                    team["matchesPlayed"],
                    team["dq"],
                    sort_orders,
                )
            )
        return rankings or None
| mit | 0542cdfda22773f59d5d97895f988a55 | 32.425 | 78 | 0.524308 | 4.594502 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/web/handlers/admin/sitevars.py | 1 | 1171 | from flask import redirect, request, url_for
from werkzeug.wrappers import Response
from backend.common.models.sitevar import Sitevar
from backend.web.profiled_render import render_template
def sitevars_list() -> str:
    """Render the admin list of all sitevars."""
    context = {
        "sitevars": Sitevar.query().fetch(10000),
    }
    return render_template("admin/sitevar_list.html", context)
def sitevar_edit(sitevar_key: str) -> str:
    """Render the edit page for a single sitevar."""
    context = {
        "sitevar": Sitevar.get_by_id(sitevar_key),
        # Set after a successful save so the template can show a banner
        "success": request.args.get("success"),
    }
    return render_template("admin/sitevar_edit.html", context)
def sitevar_edit_post(sitevar_key: str) -> Response:
    """Write a sitevar from the posted form, then redirect to its edit page.

    Note: the `sitevar_key` path argument is intentionally unused; the form's
    own "key" field determines which sitevar is written.
    """
    form = request.form
    saved = Sitevar(
        id=form.get("key"),
        description=form.get("description"),
        values_json=form.get("values_json"),
    )
    saved.put()
    edit_url = url_for("admin.sitevar_edit", sitevar_key=saved.key.id(), success="true")
    return redirect(edit_url)
def sitevar_create() -> str:
    """Render the sitevar creation page."""
    return render_template("admin/sitevar_create.html")
| mit | c371f86d8824b051e232567d3fcea903 | 23.914894 | 83 | 0.668659 | 3.474777 | false | false | false | false |
the-blue-alliance/the-blue-alliance | old_py2/helpers/event_team_status_helper.py | 1 | 19941 | import copy
from google.appengine.ext import ndb
from google.appengine.ext.ndb.tasklets import Future
from consts.playoff_type import PlayoffType
from helpers.match_helper import MatchHelper
from helpers.rankings_helper import RankingsHelper
from helpers.team_helper import TeamHelper
from models.event_details import EventDetails
from models.match import Match
class EventTeamStatusHelper(object):
@classmethod
def generate_team_at_event_alliance_status_string(cls, team_key, status_dict):
if not status_dict:
return '--'
alliance = status_dict.get('alliance')
if alliance:
pick = alliance['pick']
if pick == 0:
pick = 'Captain'
else:
# Convert to ordinal number http://stackoverflow.com/questions/9647202/ordinal-numbers-replacement
pick = '{} Pick'.format("%d%s" % (pick,"tsnrhtdd"[(pick/10%10!=1)*(pick%10<4)*pick%10::4]))
backup = alliance['backup']
if backup and team_key == backup['in']:
pick = 'Backup'
return '<b>{}</b> of <b>{}</b>'.format(pick, alliance['name'])
else:
return '--'
@classmethod
def generate_team_at_event_playoff_status_string(cls, team_key, status_dict):
if not status_dict:
return '--'
playoff = status_dict.get('playoff')
if playoff:
level = playoff.get('level')
status = playoff.get('status')
record = playoff.get('record')
level_record = playoff.get('current_level_record')
playoff_average = playoff.get('playoff_average')
if status == 'playing':
record_str = '{}-{}-{}'.format(level_record['wins'], level_record['losses'], level_record['ties'])
playoff_str = 'Currently <b>{}</b> in the <b>{}</b>'.format(record_str, Match.COMP_LEVELS_VERBOSE_FULL[level])
else:
if status == 'won':
if level == 'f':
playoff_str = '<b>Won the event</b>'
else:
playoff_str = '<b>Won the {}</b>'.format(Match.COMP_LEVELS_VERBOSE_FULL[level])
elif status == 'eliminated':
playoff_str = '<b>Eliminated in the {}</b>'.format(Match.COMP_LEVELS_VERBOSE_FULL[level])
else:
raise Exception("Unknown playoff status: {}".format(status))
if record:
playoff_str += ' with a playoff record of <b>{}-{}-{}</b>'.format(record['wins'], record['losses'], record['ties'])
if playoff_average:
playoff_str += ' with a playoff average of <b>{:.1f}</b>'.format(playoff_average)
return playoff_str
else:
return '--'
    @classmethod
    def generate_team_at_event_status_string(cls, team_key, status_dict, formatting=True, event=None, include_team=True, verbose=False):
        """
        Generate a team at event status string from a status dict

        :param team_key: key name of the team, e.g. 'frc254'
        :param status_dict: team@event status dict (qual/alliance/playoff keys)
        :param formatting: if False, strip the <b></b> tags from the result
        :param event: Event object used for the event name in the message
        :param include_team: if True, prefix the message with 'Team NNNN'
        :param verbose: use long-form record strings (via _build_verbose_record)
        """
        # Fallback message when there is no status to report yet
        if include_team:
            default_msg = 'Team {} is waiting for the {} to begin.'.format(team_key[3:], event.normalized_name if event else 'event')
        else:
            default_msg = 'is waiting for the {} to begin.'.format(event.normalized_name if event else 'event')
        if not status_dict:
            return default_msg

        qual = status_dict.get('qual')
        alliance = status_dict.get('alliance')
        playoff = status_dict.get('playoff')

        # Sentence fragments accumulated and joined at the end
        components = []
        if qual:
            status = qual.get('status')
            num_teams = qual.get('num_teams')
            ranking = qual.get('ranking')
            if ranking:
                rank = ranking.get('rank')
                record = ranking.get('record')
                qual_average = ranking.get('qual_average')

                num_teams_str = ''
                if num_teams:
                    num_teams_str = ' of {}'.format(num_teams) if verbose else '/{}'.format(num_teams)
                # Past tense once quals are over
                if status == 'completed':
                    is_tense = 'was'
                    has_tense = 'had'
                else:
                    is_tense = 'is'
                    has_tense = 'has'

                qual_str = None
                if record:
                    if verbose:
                        record_str = cls._build_verbose_record(record)
                    else:
                        record_str = '{}-{}-{}'.format(record['wins'], record['losses'], record['ties'])
                    if rank:
                        qual_str = '{} <b>Rank {}{}</b> with a record of <b>{}</b> in quals'.format(is_tense, rank, num_teams_str, record_str)
                    else:
                        qual_str = '{} a record of <b>{}</b> in quals'.format(has_tense, record_str)
                elif qual_average:
                    # 2015-style events rank by average score instead of W-L-T
                    if rank:
                        qual_str = '{} <b>Rank {}{}</b> with an average score of <b>{:.1f}</b> in quals'.format(is_tense, rank, num_teams_str, qual_average)
                    else:
                        qual_str = '{} an average score of <b>{:.1f}</b> in quals'.format(has_tense, qual_average)

                if qual_str:
                    components.append(qual_str)

        pick = None
        if alliance:
            pick = alliance['pick']
            if pick == 0:
                pick = 'Captain'
            else:
                # Convert to ordinal number http://stackoverflow.com/questions/9647202/ordinal-numbers-replacement
                # NOTE(review): pick/10 relies on Python 2 integer division;
                # under Python 3 this slice index would be a float (old_py2 only)
                pick = '{} Pick'.format("%d%s" % (pick,"tsnrhtdd"[(pick/10%10!=1)*(pick%10<4)*pick%10::4]))
            # A called-in backup team supersedes the pick position
            backup = alliance['backup']
            if backup and team_key == backup['in']:
                pick = 'Backup'
            if not playoff:
                alliance_str = 'will be competing in the playoffs as the <b>{}</b> of <b>{}</b>'.format(pick, alliance['name'])
                components.append(alliance_str)

        if playoff:
            level = playoff.get('level')
            status = playoff.get('status')
            record = playoff.get('record')
            level_record = playoff.get('current_level_record')
            playoff_average = playoff.get('playoff_average')

            if status == 'playing':
                if verbose:
                    record_str = cls._build_verbose_record(level_record)
                else:
                    record_str = '{}-{}-{}'.format(level_record['wins'], level_record['losses'], level_record['ties'])
                playoff_str = 'is <b>{}</b> in the <b>{}</b>'.format(record_str, Match.COMP_LEVELS_VERBOSE_FULL[level])
                if alliance:
                    playoff_str += ' as the <b>{}</b> of <b>{}</b>'.format(pick, alliance['name'])
                # A live playoff status replaces everything accumulated so far
                components = [playoff_str]
            else:
                if alliance:
                    components.append('competed in the playoffs as the <b>{}</b> of <b>{}</b>'.format(pick, alliance['name']))
                if status == 'won':
                    if level == 'f':
                        playoff_str = '<b>won the event</b>'
                    else:
                        playoff_str = '<b>won the {}</b>'.format(Match.COMP_LEVELS_VERBOSE_FULL[level])
                elif status == 'eliminated':
                    playoff_str = 'was <b>eliminated in the {}</b>'.format(Match.COMP_LEVELS_VERBOSE_FULL[level])
                else:
                    raise Exception("Unknown playoff status: {}".format(status))
                if record:
                    playoff_str += ' with a playoff record of <b>{}-{}-{}</b>'.format(record['wins'], record['losses'], record['ties'])
                if playoff_average:
                    playoff_str += ' with a playoff average of <b>{:.1f}</b>'.format(playoff_average)
                components.append(playoff_str)

        if not components:
            return default_msg

        # Join the fragments into a single sentence ('and' before the last one)
        if len(components) > 1:
            components[-1] = 'and {}'.format(components[-1])
        if len(components) > 2:
            join_str = ', '
        else:
            join_str = ' '
        if include_team:
            final_string = 'Team {} {}'.format(team_key[3:], join_str.join(components))
        else:
            final_string = '{}'.format(join_str.join(components))
        if event:
            final_string += ' at the {}.'.format(event.normalized_name)
        else:
            final_string += '.'

        return final_string if formatting else final_string.replace('<b>', '').replace('</b>', '')
@classmethod
def generate_team_at_event_status(cls, team_key, event, matches=None):
"""
Generate a dict containing team@event status information
:param team_key: Key name of the team to focus on
:param event: Event object
:param matches: Organized matches (via MatchHelper.organized_matches) from the event, optional
"""
event_details = event.details
if not matches:
matches = event.matches
team_matches = [m for m in matches if team_key in m.team_key_names]
next_match = MatchHelper.upcomingMatches(team_matches, num=1)
last_match = MatchHelper.recent_matches(team_matches, num=1)
matches = MatchHelper.organized_matches(matches)
return copy.deepcopy({
'qual': cls._build_qual_info(team_key, event_details, matches, event.year),
'alliance': cls._build_alliance_info(team_key, event_details, matches),
'playoff': cls._build_playoff_info(team_key, event_details, matches, event.year, event.playoff_type),
'last_match_key': last_match[0].key_name if last_match else None,
'next_match_key': next_match[0].key_name if next_match else None,
}) # TODO: Results are getting mixed unless copied. 2017-02-03 -fangeugene
    @classmethod
    def _build_qual_info(cls, team_key, event_details, matches, year):
        """Build the 'qual' portion of a team@event status dict.

        Uses official rankings when available; otherwise reconstructs a
        record from the qual matches themselves. Returns None when the team
        does not appear at the event.
        """
        # Overall qual progress: not_started / playing / completed
        if not matches['qm']:
            status = 'not_started'
        else:
            status = 'completed'
            for match in matches['qm']:
                if not match.has_been_played:
                    status = 'playing'
                    break

        if event_details and event_details.rankings2:
            # Preferred path: official rankings are available
            rankings = event_details.rankings2
            qual_info = None
            for ranking in rankings:
                if ranking['team_key'] == team_key:
                    qual_info = {
                        'status': status,
                        'ranking': ranking,
                    }
                    break

            if qual_info:
                qual_info['num_teams'] = len(rankings)
                qual_info['sort_order_info'] = RankingsHelper.get_sort_order_info(event_details)

            return qual_info
        else:
            # Use matches as fallback
            all_teams = set()
            wins = 0
            losses = 0
            ties = 0
            qual_score_sum = 0
            matches_played = 0
            for match in matches['qm']:
                for color in ['red', 'blue']:
                    for team in match.alliances[color]['teams']:
                        all_teams.add(team)
                        # Surrogate appearances don't count toward the record
                        if team == team_key and match.has_been_played and \
                                team_key not in match.alliances[color]['surrogates']:
                            matches_played += 1

                            if match.winning_alliance == color:
                                wins += 1
                            elif match.winning_alliance == '':
                                ties += 1
                            else:
                                losses += 1

                            qual_score_sum += match.alliances[color]['score']

            qual_average = float(qual_score_sum) / matches_played if matches_played else 0

            if team_key in all_teams:
                return {
                    'status': status,
                    'ranking': {
                        'rank': None,
                        'matches_played': matches_played,
                        'dq': None,
                        # 2015 events rank by average score, not W-L-T
                        'record': {
                            'wins': wins,
                            'losses': losses,
                            'ties': ties,
                        } if year != 2015 else None,
                        'qual_average': qual_average if year == 2015 else None,
                        'sort_orders': None,
                        'team_key': team_key,
                    },
                    'num_teams': len(all_teams),
                    'sort_order_info': None
                }
            else:
                return None
@classmethod
def _build_alliance_info(cls, team_key, event_details, matches):
if not event_details or not event_details.alliance_selections:
return None
alliance, number = cls._get_alliance(team_key, event_details, matches)
if not alliance:
return None
# Calculate the role played by the team on the alliance
backup_info = alliance.get('backup', {}) if alliance.get('backup') else {}
pick = -1 if team_key == backup_info.get('in', "") else None
for i, team in enumerate(alliance['picks']):
if team == team_key:
pick = i
break
return {
'pick': pick,
'name': alliance.get('name', "Alliance {}".format(number)),
'number': number,
'backup': alliance.get('backup'),
}
    @classmethod
    def _build_playoff_info(cls, team_key, event_details, matches, year, playoff_type):
        """Build the playoff status dict for a team at an event.

        Walks every playoff match (finals first) looking for matches played
        by the team's complete alliance (picks plus any called-in backup),
        accumulating per-level and overall win/loss/tie records. Returns None
        if the alliance never appears in a playoff match.

        Note: 2015 playoffs (except finals) had no ties/records, so record
        fields are suppressed and 'playoff_average' is reported instead.
        """
        # Matches needs to be all playoff matches at the event, to properly account for backups
        import numpy as np
        alliance, _ = cls._get_alliance(team_key, event_details, matches)
        # An alliance "matches" a played match if >= 2 of its members appear
        # on one side; this tolerates a single backup substitution.
        complete_alliance = set(alliance['picks']) if alliance else set()
        if alliance and alliance.get('backup'):
            complete_alliance.add(alliance['backup']['in'])
        # Best-of-5 finals need 3 wins/losses to decide; best-of-3 need 2.
        is_bo5 = playoff_type == PlayoffType.BO5_FINALS
        all_wins = 0
        all_losses = 0
        all_ties = 0
        playoff_scores = []
        status = None
        # Iterate from finals down so `status` reflects the deepest level reached.
        for comp_level in ['f', 'sf', 'qf', 'ef']:  # playoffs
            if matches[comp_level]:
                level_wins = 0
                level_losses = 0
                level_ties = 0
                level_matches = 0
                level_played = 0
                for match in matches[comp_level]:
                    for color in ['red', 'blue']:
                        match_alliance = set(match.alliances[color]['teams'])
                        if len(match_alliance.intersection(complete_alliance)) >= 2:
                            playoff_scores.append(match.alliances[color]['score'])
                            level_matches += 1
                            if match.has_been_played:
                                if match.winning_alliance == color:
                                    level_wins += 1
                                    all_wins += 1
                                elif not match.winning_alliance:
                                    # 2015 non-finals matches had no winner by design,
                                    # so only count ties outside that case.
                                    if not (year == 2015 and comp_level != 'f'):
                                        # The match was a tie
                                        level_ties += 1
                                        all_ties += 1
                                else:
                                    level_losses += 1
                                    all_losses += 1
                                level_played += 1
                if not status:
                    # Only set this for the first comp level that gets this far,
                    # But run through the rest to calculate the full record
                    if level_wins == (3 if is_bo5 else 2):
                        status = {
                            'status': 'won',
                            'level': comp_level,
                        }
                    elif level_losses == (3 if is_bo5 else 2):
                        status = {
                            'status': 'eliminated',
                            'level': comp_level
                        }
                    elif level_matches > 0:
                        if year == 2015:
                            # This only works for past events, but 2015 is in the past so this works
                            status = {
                                'status': 'eliminated',
                                'level': comp_level,
                            }
                        else:
                            status = {
                                'status': 'playing',
                                'level': comp_level,
                            }
                    if status:
                        # 2015 only kept records for finals matches.
                        status['current_level_record'] = {
                            'wins': level_wins,
                            'losses': level_losses,
                            'ties': level_ties
                        } if year != 2015 or comp_level == 'f' else None
        if status:
            status['record'] = {
                'wins': all_wins,
                'losses': all_losses,
                'ties': all_ties
            } if year != 2015 else None
            # 2015 ranked playoff alliances by average match score.
            status['playoff_average'] = np.mean(playoff_scores) if year == 2015 else None
        return status
    @classmethod
    def _get_alliance(cls, team_key, event_details, matches):
        """
        Get the playoff alliance the team belongs to.

        Returns a (alliance_dict, alliance_number) tuple:
        - With alliance selection data: the selection dict and its 1-based number.
        - Without selection data: alliances are reconstructed from playoff match
          line-ups, so the dict only has 'picks' and the number is None.
        - (None, 0) when the team is not on any alliance.
        """
        if event_details and event_details.alliance_selections:
            for i, alliance in enumerate(event_details.alliance_selections):
                alliance_number = i + 1
                if team_key in alliance['picks']:
                    return alliance, alliance_number
                backup_info = alliance.get('backup') if alliance.get('backup') else {}
                if team_key == backup_info.get('in', ""):
                    # If this team came in as a backup team
                    return alliance, alliance_number
        else:
            # No event_details. Use matches to generate alliances.
            complete_alliances = []
            for comp_level in Match.ELIM_LEVELS:
                for match in matches[comp_level]:
                    for color in ['red', 'blue']:
                        alliance = copy.copy(match.alliances[color]['teams'])
                        for i, complete_alliance in enumerate(complete_alliances):  # search for alliance. could be more efficient
                            if len(set(alliance).intersection(set(complete_alliance))) >= 2:  # if >= 2 teams are the same, then the alliance is the same
                                backups = list(set(alliance).difference(set(complete_alliance)))
                                complete_alliances[i] += backups  # ensures that backup robots are listed last
                                break
                        else:
                            # for/else: no existing alliance matched, so this
                            # line-up starts a new alliance.
                            complete_alliances.append(alliance)
            for complete_alliance in complete_alliances:
                if team_key in complete_alliance:
                    return {'picks': complete_alliance}, None  # Alliance number is unknown
        alliance_number = 0
        return None, alliance_number  # Team didn't make it to elims
@classmethod
def _build_verbose_record(cls, record):
win_label = 'wins' if record['wins'] != 1 else 'win'
loss_label = 'losses' if record['losses'] != 1 else 'loss'
tie_label = 'ties' if record['ties'] != 1 else 'tie'
return '{} {}, {} {}, and {} {}'.format(
record['wins'], win_label,
record['losses'], loss_label,
record['ties'], tie_label)
| mit | a535a6eae3564db77534f74d8dd00181 | 43.511161 | 156 | 0.483978 | 4.343498 | false | false | false | false |
the-blue-alliance/the-blue-alliance | old_py2/controllers/cron_controller.py | 1 | 13631 | import datetime
import logging
import os
import json
from google.appengine.api import taskqueue
from google.appengine.ext import ndb
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from consts.event_type import EventType
from controllers.api.api_status_controller import ApiStatusController
from database.district_query import DistrictsInYearQuery
from database.event_query import DistrictEventsQuery, EventQuery
from database.match_query import EventMatchesQuery
from database.team_query import DistrictTeamsQuery
from helpers.award_manipulator import AwardManipulator
from helpers.bluezone_helper import BlueZoneHelper
from helpers.district_helper import DistrictHelper
from helpers.district_manipulator import DistrictManipulator
from helpers.event_helper import EventHelper
from helpers.event_manipulator import EventManipulator
from helpers.event_details_manipulator import EventDetailsManipulator
from helpers.event_insights_helper import EventInsightsHelper
from helpers.event_team_manipulator import EventTeamManipulator
from helpers.event_team_status_helper import EventTeamStatusHelper
from helpers.event_team_repairer import EventTeamRepairer
from helpers.event_team_updater import EventTeamUpdater
from helpers.firebase.firebase_pusher import FirebasePusher
from helpers.insights_helper import InsightsHelper
from helpers.match_helper import MatchHelper
from helpers.match_time_prediction_helper import MatchTimePredictionHelper
from helpers.matchstats_helper import MatchstatsHelper
from helpers.notification_helper import NotificationHelper
from helpers.outgoing_notification_helper import OutgoingNotificationHelper
from helpers.playoff_advancement_helper import PlayoffAdvancementHelper
from helpers.prediction_helper import PredictionHelper
from helpers.insight_manipulator import InsightManipulator
from helpers.suggestions.suggestion_fetcher import SuggestionFetcher
from helpers.team_manipulator import TeamManipulator
from helpers.match_manipulator import MatchManipulator
from models.district import District
from models.event import Event
from models.event_details import EventDetails
from models.event_team import EventTeam
from models.match import Match
from models.sitevar import Sitevar
from models.suggestion import Suggestion
from models.team import Team
from models.typeahead_entry import TypeaheadEntry
class EventShortNameCalcEnqueue(webapp.RequestHandler):
    """
    Enqueues Event short_name computation for official events
    """
    def get(self, year):
        # Keys-only query keeps the fetch cheap; models are resolved in bulk.
        event_keys = Event.query(Event.official == True, Event.year == int(year)).fetch(200, keys_only=True)
        events = ndb.get_multi(event_keys)
        for event in events:
            # One task per event; the 'do' handler performs the actual calc.
            taskqueue.add(
                url='/tasks/math/do/event_short_name_calc_do/{}'.format(event.key.id()),
                method='GET')
        template_values = {'events': events}
        path = os.path.join(os.path.dirname(__file__), '../templates/math/event_short_name_calc_enqueue.html')
        self.response.out.write(template.render(path, template_values))
class EventShortNameCalcDo(webapp.RequestHandler):
    """
    Computes Event short_name
    """
    def get(self, event_key):
        event = Event.get_by_id(event_key)
        # Derive the short display name from the full event name and persist it.
        event.short_name = EventHelper.getShortName(event.name)
        EventManipulator.createOrUpdate(event)
        template_values = {'event': event}
        path = os.path.join(os.path.dirname(__file__), '../templates/math/event_short_name_calc_do.html')
        self.response.out.write(template.render(path, template_values))
class EventTeamRepairDo(webapp.RequestHandler):
    """
    Repair broken EventTeams.
    """
    def get(self):
        # EventTeams missing a year are considered broken; fetch and repair them.
        event_teams_keys = EventTeam.query(EventTeam.year == None).fetch(keys_only=True)
        event_teams = ndb.get_multi(event_teams_keys)
        event_teams = EventTeamRepairer.repair(event_teams)
        event_teams = EventTeamManipulator.createOrUpdate(event_teams)
        # createOrUpdate returns a bare model (not a list) for a single item;
        # normalize for the template. sigh. -gregmarra
        if type(event_teams) == EventTeam:
            event_teams = [event_teams]
        template_values = {
            'event_teams': event_teams,
        }
        path = os.path.join(os.path.dirname(__file__), '../templates/math/eventteam_repair_do.html')
        self.response.out.write(template.render(path, template_values))
class FinalMatchesRepairDo(webapp.RequestHandler):
    """
    Repairs zero-indexed final matches
    """
    def get(self, year):
        year_event_keys = Event.query(Event.year == int(year)).fetch(1000, keys_only=True)
        final_match_keys = []
        for event_key in year_event_keys:
            final_match_keys.extend(Match.query(Match.event == event_key, Match.comp_level == 'f').fetch(100, keys_only=True))
        match_keys_to_repair = []
        for match_key in final_match_keys:
            key_name = match_key.id()
            # Finals matches should be 1-indexed ('_f1m...'); '_f0m' keys are broken.
            if '_f0m' in key_name:
                match_keys_to_repair.append(match_key)
        deleted_keys = []
        matches_to_repair = ndb.get_multi(match_keys_to_repair)
        for match in matches_to_repair:
            # Remember the broken key for deletion, then rewrite the match
            # under the corrected set_number=1 key.
            deleted_keys.append(match.key)
            event = ndb.get_multi([match.event])[0]
            match.set_number = 1
            match.key = ndb.Key(Match, Match.render_key_name(
                event.key.id(),
                match.comp_level,
                match.set_number,
                match.match_number))
        MatchManipulator.createOrUpdate(matches_to_repair)
        MatchManipulator.delete_keys(deleted_keys)
        template_values = {'deleted_keys': deleted_keys,
                           'new_matches': matches_to_repair}
        path = os.path.join(os.path.dirname(__file__), '../templates/math/final_matches_repair_do.html')
        self.response.out.write(template.render(path, template_values))
class YearInsightsEnqueue(webapp.RequestHandler):
    """
    Enqueues Insights calculation of a given kind for a given year
    """
    def get(self, kind, year):
        # Heavy computation runs on the b2 backend module, not in this request.
        taskqueue.add(
            target='backend-tasks-b2',
            url='/backend-tasks-b2/math/do/insights/{}/{}'.format(kind, year),
            method='GET')
        template_values = {
            'kind': kind,
            'year': year
        }
        path = os.path.join(os.path.dirname(__file__), '../templates/math/year_insights_enqueue.html')
        self.response.out.write(template.render(path, template_values))
class YearInsightsDo(webapp.RequestHandler):
    """
    Calculates insights of a given kind for a given year.
    Calculations of a given kind should reuse items fetched from the datastore.
    """
    def get(self, kind, year):
        # Dispatch to the helper for the requested insight kind; unknown kinds
        # fall through with insights still None and are not persisted.
        year = int(year)
        insights = None
        if kind == 'matches':
            insights = InsightsHelper.doMatchInsights(year)
        elif kind == 'awards':
            insights = InsightsHelper.doAwardInsights(year)
        elif kind == 'predictions':
            insights = InsightsHelper.doPredictionInsights(year)
        # PEP 8: identity comparison with None, not `!= None`.
        if insights is not None:
            InsightManipulator.createOrUpdate(insights)
        template_values = {
            'insights': insights,
            'year': year,
            'kind': kind,
        }
        path = os.path.join(os.path.dirname(__file__), '../templates/math/year_insights_do.html')
        self.response.out.write(template.render(path, template_values))
    def post(self):
        # Taskqueue tasks arrive as POST; delegate to the GET logic.
        self.get()
class OverallInsightsEnqueue(webapp.RequestHandler):
    """
    Enqueues Overall Insights calculation for a given kind.
    """
    def get(self, kind):
        # Heavy computation runs on the b2 backend module, not in this request.
        taskqueue.add(
            target='backend-tasks-b2',
            url='/backend-tasks-b2/math/do/overallinsights/{}'.format(kind),
            method='GET')
        template_values = {
            'kind': kind,
        }
        path = os.path.join(os.path.dirname(__file__), '../templates/math/overall_insights_enqueue.html')
        self.response.out.write(template.render(path, template_values))
class OverallInsightsDo(webapp.RequestHandler):
    """
    Calculates overall insights of a given kind.
    Calculations of a given kind should reuse items fetched from the datastore.
    """
    def get(self, kind):
        # Dispatch to the helper for the requested insight kind; unknown kinds
        # fall through with insights still None and are not persisted.
        insights = None
        if kind == 'matches':
            insights = InsightsHelper.doOverallMatchInsights()
        elif kind == 'awards':
            insights = InsightsHelper.doOverallAwardInsights()
        # PEP 8: identity comparison with None, not `!= None`.
        if insights is not None:
            InsightManipulator.createOrUpdate(insights)
        template_values = {
            'insights': insights,
            'kind': kind,
        }
        path = os.path.join(os.path.dirname(__file__), '../templates/math/overall_insights_do.html')
        self.response.out.write(template.render(path, template_values))
    def post(self):
        # Taskqueue tasks arrive as POST; delegate to the GET logic.
        self.get()
class TypeaheadCalcEnqueue(webapp.RequestHandler):
    """
    Enqueues typeahead calculations
    """
    def get(self):
        # The actual recomputation runs on the b2 backend module.
        taskqueue.add(
            target='backend-tasks-b2',
            url='/backend-tasks-b2/math/do/typeaheadcalc',
            method='GET')
        template_values = {}
        path = os.path.join(os.path.dirname(__file__), '../templates/math/typeaheadcalc_enqueue.html')
        self.response.out.write(template.render(path, template_values))
class TypeaheadCalcDo(webapp.RequestHandler):
    """
    Calculates typeahead entries
    """
    def get(self):
        # Fetch events, teams, and districts concurrently via NDB tasklets.
        @ndb.tasklet
        def get_events_async():
            event_keys = yield Event.query().order(-Event.year).order(Event.name).fetch_async(keys_only=True)
            events = yield ndb.get_multi_async(event_keys)
            raise ndb.Return(events)
        @ndb.tasklet
        def get_teams_async():
            team_keys = yield Team.query().order(Team.team_number).fetch_async(keys_only=True)
            teams = yield ndb.get_multi_async(team_keys)
            raise ndb.Return(teams)
        @ndb.tasklet
        def get_districts_async():
            district_keys = yield District.query().order(-District.year).fetch_async(keys_only=True)
            districts = yield ndb.get_multi_async(district_keys)
            raise ndb.Return(districts)
        @ndb.toplevel
        def get_events_teams_districts():
            events, teams, districts = yield get_events_async(), get_teams_async(), get_districts_async()
            raise ndb.Return((events, teams, districts))
        events, teams, districts = get_events_teams_districts()
        # results maps typeahead key -> list of display strings for that key.
        results = {}
        for team in teams:
            if not team.nickname:
                nickname = "Team %s" % team.team_number
            else:
                nickname = team.nickname
            data = '%s | %s' % (team.team_number, nickname)
            if TypeaheadEntry.ALL_TEAMS_KEY in results:
                results[TypeaheadEntry.ALL_TEAMS_KEY].append(data)
            else:
                results[TypeaheadEntry.ALL_TEAMS_KEY] = [data]
        for district in districts:
            data = '%s District [%s]' % (district.display_name, district.abbreviation.upper())
            # all districts (deduplicated across years)
            if TypeaheadEntry.ALL_DISTRICTS_KEY in results:
                if data not in results[TypeaheadEntry.ALL_DISTRICTS_KEY]:
                    results[TypeaheadEntry.ALL_DISTRICTS_KEY].append(data)
            else:
                results[TypeaheadEntry.ALL_DISTRICTS_KEY] = [data]
        for event in events:
            data = '%s %s [%s]' % (event.year, event.name, event.event_short.upper())
            # all events
            if TypeaheadEntry.ALL_EVENTS_KEY in results:
                results[TypeaheadEntry.ALL_EVENTS_KEY].append(data)
            else:
                results[TypeaheadEntry.ALL_EVENTS_KEY] = [data]
            # events by year
            if TypeaheadEntry.YEAR_EVENTS_KEY.format(event.year) in results:
                results[TypeaheadEntry.YEAR_EVENTS_KEY.format(event.year)].append(data)
            else:
                results[TypeaheadEntry.YEAR_EVENTS_KEY.format(event.year)] = [data]
        # Prepare to remove old entries: kick off the keys fetch before writes.
        old_entry_keys_future = TypeaheadEntry.query().fetch_async(keys_only=True)
        # Add new entries
        entries = []
        for key_name, data in results.items():
            entries.append(TypeaheadEntry(id=key_name, data_json=json.dumps(data)))
        ndb.put_multi(entries)
        # Remove old entries that were not regenerated this run.
        old_entry_keys = set(old_entry_keys_future.get_result())
        new_entry_keys = set([ndb.Key(TypeaheadEntry, key_name) for key_name in results.keys()])
        keys_to_delete = old_entry_keys.difference(new_entry_keys)
        logging.info("Removing the following unused TypeaheadEntries: {}".format([key.id() for key in keys_to_delete]))
        ndb.delete_multi(keys_to_delete)
        template_values = {'results': results}
        path = os.path.join(os.path.dirname(__file__), '../templates/math/typeaheadcalc_do.html')
        self.response.out.write(template.render(path, template_values))
class UpcomingNotificationDo(webapp.RequestHandler):
    """
    Sends out notifications for upcoming matches
    """
    def get(self):
        # Only events currently in progress (within a day) can have upcoming matches.
        live_events = EventHelper.getEventsWithinADay()
        NotificationHelper.send_upcoming_matches(live_events)
class BlueZoneUpdateDo(webapp.RequestHandler):
    """
    Update the current "best match"
    """
    def get(self):
        live_events = EventHelper.getEventsWithinADay()
        try:
            BlueZoneHelper.update_bluezone(live_events)
        except Exception, e:
            # Best-effort: BlueZone selection must never break the cron run.
            # (Python 2 except syntax -- this module runs on the legacy GAE runtime.)
            logging.error("BlueZone update failed")
            logging.exception(e)
| mit | bf9e13cc98f5facf3bb226ad8f9d61d2 | 36.040761 | 126 | 0.656592 | 3.818207 | false | false | false | false |
the-blue-alliance/the-blue-alliance | src/backend/common/cache_clearing/tests/database_cache_clearer_test.py | 1 | 21752 | import unittest
import pytest
from google.appengine.ext import ndb
from backend.common.cache_clearing import get_affected_queries
from backend.common.consts.award_type import AwardType
from backend.common.consts.event_type import EventType
from backend.common.consts.media_tag import MediaTag
from backend.common.models.district import District
from backend.common.models.district_team import DistrictTeam
from backend.common.models.event import Event
from backend.common.models.event_details import EventDetails
from backend.common.models.event_team import EventTeam
from backend.common.models.match import Match
from backend.common.models.team import Team
from backend.common.queries import (
award_query,
district_query,
event_details_query,
event_query,
match_query,
media_query,
robot_query,
team_query,
)
@pytest.mark.usefixtures("ndb_context")
class TestDatabaseCacheClearer(unittest.TestCase):
    """Verify that get_affected_queries maps model changes to the exact set of
    query cache keys that must be invalidated. Each test builds an
    `affected_refs` dict (as produced by a manipulator) and checks both the
    total number of affected queries and specific expected cache keys."""
    def setUp(self) -> None:
        # Seed EventTeam links so team<->event fan-out can be resolved.
        eventteam_2015casj_frc254 = EventTeam(
            id="2015casj_frc254",
            event=ndb.Key(Event, "2015casj"),
            team=ndb.Key(Team, "frc254"),
            year=2015,
        )
        eventteam_2015cama_frc604 = EventTeam(
            id="2015cama_frc604",
            event=ndb.Key(Event, "2015cama"),
            team=ndb.Key(Team, "frc604"),
            year=2015,
        )
        eventteam_2010cama_frc604 = EventTeam(
            id="2010cama_frc604",
            event=ndb.Key(Event, "2010cama"),
            team=ndb.Key(Team, "frc604"),
            year=2010,
        )
        eventteam_2016necmp_frc125 = EventTeam(
            id="2016necmp_frc125",
            event=ndb.Key(Event, "2016necmp"),
            team=ndb.Key(Team, "frc125"),
            year=2016,
        )
        eventteam_2015casj_frc254.put()
        eventteam_2015cama_frc604.put()
        eventteam_2010cama_frc604.put()
        eventteam_2016necmp_frc125.put()
        # Seed DistrictTeam links for district<->team fan-out.
        districtteam_2015fim_frc254 = DistrictTeam(
            id="2015fim_frc254",
            district_key=ndb.Key(District, "2015fim"),
            team=ndb.Key(Team, "frc254"),
            year=2015,
        )
        districtteam_2015mar_frc604 = DistrictTeam(
            id="2015mar_frc604",
            district_key=ndb.Key(District, "2015mar"),
            team=ndb.Key(Team, "frc604"),
            year=2015,
        )
        districtteam_2016ne_frc604 = DistrictTeam(
            id="2016ne_frc604",
            district_key=ndb.Key(District, "2016ne"),
            team=ndb.Key(Team, "frc604"),
            year=2016,
        )
        districtteam_2015fim_frc254.put()
        districtteam_2015mar_frc604.put()
        districtteam_2016ne_frc604.put()
        # Districts used by the district-history/abbreviation lookups.
        district_2015ne = District(
            id="2015ne",
            year=2015,
            abbreviation="ne",
        )
        district_2016chs = District(
            id="2016chs",
            year=2016,
            abbreviation="chs",
        )
        district_2015ne.put()
        district_2016chs.put()
        # Events: one district championship and one regional with a parent
        # event (so division queries get exercised).
        event_2016necmp = Event(
            id="2016necmp",
            year=2016,
            district_key=ndb.Key(District, "2016ne"),
            event_short="necmp",
            event_type_enum=EventType.DISTRICT_CMP,
        )
        event_2016necmp.put()
        event_2015casj = Event(
            id="2015casj",
            year=2015,
            event_short="casj",
            event_type_enum=EventType.REGIONAL,
            parent_event=ndb.Key(Event, "2015cafoo"),
        )
        event_2015casj.put()
    def test_award_updated(self) -> None:
        """Award changes invalidate event, team, team-year, team-event, and
        team/event-type/award-type award queries."""
        affected_refs = {
            "event": {ndb.Key(Event, "2015casj"), ndb.Key(Event, "2015cama")},
            "team_list": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
            "year": {2014, 2015},
            "event_type_enum": {EventType.REGIONAL, EventType.DISTRICT},
            "award_type_enum": {AwardType.WINNER, AwardType.CHAIRMANS},
        }
        cache_keys = [q[0] for q in get_affected_queries.award_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 20)
        self.assertTrue(
            award_query.EventAwardsQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            award_query.EventAwardsQuery("2015cama").cache_key in cache_keys
        )
        self.assertTrue(award_query.TeamAwardsQuery("frc254").cache_key in cache_keys)
        self.assertTrue(award_query.TeamAwardsQuery("frc604").cache_key in cache_keys)
        self.assertTrue(
            award_query.TeamYearAwardsQuery("frc254", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            award_query.TeamYearAwardsQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            award_query.TeamYearAwardsQuery("frc604", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            award_query.TeamYearAwardsQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            award_query.TeamEventAwardsQuery("frc254", "2015casj").cache_key
            in cache_keys
        )
        self.assertTrue(
            award_query.TeamEventAwardsQuery("frc254", "2015cama").cache_key
            in cache_keys
        )
        self.assertTrue(
            award_query.TeamEventAwardsQuery("frc604", "2015casj").cache_key
            in cache_keys
        )
        self.assertTrue(
            award_query.TeamEventAwardsQuery("frc604", "2015cama").cache_key
            in cache_keys
        )
        # Cross product of team x event type x award type queries.
        for team_key in ["frc254", "frc604"]:
            for event_type in [EventType.REGIONAL, EventType.DISTRICT]:
                for award_type in [AwardType.WINNER, AwardType.CHAIRMANS]:
                    self.assertTrue(
                        award_query.TeamEventTypeAwardsQuery(
                            team_key, event_type, award_type
                        ).cache_key
                        in cache_keys
                    )
    def test_event_updated(self) -> None:
        """Event changes fan out to year lists, district lists, team-event
        queries (via seeded EventTeams), and division queries (via parent_event)."""
        affected_refs = {
            "key": {ndb.Key(Event, "2015casj"), ndb.Key(Event, "2015cama")},
            "year": {2014, 2015},
            "district_key": {
                ndb.Key(District, "2015fim"),
                ndb.Key(District, "2014mar"),
            },
        }
        cache_keys = [q[0] for q in get_affected_queries.event_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 15)
        self.assertTrue(event_query.EventQuery("2015casj").cache_key in cache_keys)
        self.assertTrue(event_query.EventQuery("2015cama").cache_key in cache_keys)
        self.assertTrue(event_query.EventListQuery(2014).cache_key in cache_keys)
        self.assertTrue(event_query.EventListQuery(2015).cache_key in cache_keys)
        self.assertTrue(
            event_query.DistrictEventsQuery("2015fim").cache_key in cache_keys
        )
        self.assertTrue(
            event_query.DistrictEventsQuery("2014mar").cache_key in cache_keys
        )
        self.assertTrue(event_query.TeamEventsQuery("frc254").cache_key in cache_keys)
        self.assertTrue(event_query.TeamEventsQuery("frc604").cache_key in cache_keys)
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.EventDivisionsQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            event_query.EventDivisionsQuery("2015cama").cache_key in cache_keys
        )
        # 2015cafoo is 2015casj's parent_event (seeded in setUp).
        self.assertTrue(
            event_query.EventDivisionsQuery("2015cafoo").cache_key in cache_keys
        )
    def test_event_details_updated(self) -> None:
        """EventDetails changes invalidate only the matching details queries."""
        affected_refs = {
            "key": {
                ndb.Key(EventDetails, "2015casj"),
                ndb.Key(EventDetails, "2015cama"),
            },
        }
        cache_keys = [
            q[0] for q in get_affected_queries.event_details_updated(affected_refs)
        ]
        self.assertEqual(len(cache_keys), 2)
        self.assertTrue(
            event_details_query.EventDetailsQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            event_details_query.EventDetailsQuery("2015cama").cache_key in cache_keys
        )
    def test_match_updated(self) -> None:
        """Match changes invalidate per-match, per-event, team-event, and
        team-year match queries."""
        affected_refs = {
            "key": {ndb.Key(Match, "2015casj_qm1"), ndb.Key(Match, "2015casj_qm2")},
            "event": {ndb.Key(Event, "2015casj"), ndb.Key(Event, "2015cama")},
            "team_keys": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
            "year": {2014, 2015},
        }
        cache_keys = [q[0] for q in get_affected_queries.match_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 12)
        self.assertTrue(match_query.MatchQuery("2015casj_qm1").cache_key in cache_keys)
        self.assertTrue(match_query.MatchQuery("2015casj_qm2").cache_key in cache_keys)
        # self.assertTrue(match_query.MatchGdcvDataQuery('2015casj_qm1').cache_key in cache_keys)
        # self.assertTrue(match_query.MatchGdcvDataQuery('2015casj_qm2').cache_key in cache_keys)
        self.assertTrue(
            match_query.EventMatchesQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            match_query.EventMatchesQuery("2015cama").cache_key in cache_keys
        )
        # self.assertTrue(match_query.EventMatchesGdcvDataQuery('2015casj').cache_key in cache_keys)
        # self.assertTrue(match_query.EventMatchesGdcvDataQuery('2015cama').cache_key in cache_keys)
        self.assertTrue(
            match_query.TeamEventMatchesQuery("frc254", "2015casj").cache_key
            in cache_keys
        )
        self.assertTrue(
            match_query.TeamEventMatchesQuery("frc254", "2015cama").cache_key
            in cache_keys
        )
        self.assertTrue(
            match_query.TeamEventMatchesQuery("frc604", "2015casj").cache_key
            in cache_keys
        )
        self.assertTrue(
            match_query.TeamEventMatchesQuery("frc604", "2015cama").cache_key
            in cache_keys
        )
        self.assertTrue(
            match_query.TeamYearMatchesQuery("frc254", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            match_query.TeamYearMatchesQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            match_query.TeamYearMatchesQuery("frc604", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            match_query.TeamYearMatchesQuery("frc604", 2015).cache_key in cache_keys
        )
    def test_media_updated_team(self) -> None:
        """Team-referenced media invalidate team media/social queries, the
        media queries of events those teams attended, and tag-scoped queries."""
        affected_refs = {
            "references": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
            "year": {2014, 2015},
            "media_tag_enum": {MediaTag.CHAIRMANS_ESSAY, MediaTag.CHAIRMANS_VIDEO},
        }
        cache_keys = [q[0] for q in get_affected_queries.media_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 22)
        self.assertTrue(
            media_query.TeamYearMediaQuery("frc254", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            media_query.TeamYearMediaQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            media_query.TeamSocialMediaQuery("frc254").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.TeamYearMediaQuery("frc604", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            media_query.TeamYearMediaQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            media_query.TeamSocialMediaQuery("frc604").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsMediasQuery("2015cama").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsMediasQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsPreferredMediasQuery("2015cama").cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsPreferredMediasQuery("2015casj").cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.TeamTagMediasQuery("frc254", MediaTag.CHAIRMANS_ESSAY).cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.TeamTagMediasQuery("frc604", MediaTag.CHAIRMANS_VIDEO).cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.TeamYearTagMediasQuery(
                "frc254", 2014, MediaTag.CHAIRMANS_ESSAY
            ).cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.TeamYearTagMediasQuery(
                "frc604", 2015, MediaTag.CHAIRMANS_VIDEO
            ).cache_key
            in cache_keys
        )
    def test_media_updated_event(self) -> None:
        """Event-referenced media only invalidate that event's media query."""
        affected_refs = {
            "references": {ndb.Key(Event, "2016necmp")},
            "year": {2016},
            "media_tag_enum": {None, None},
        }
        cache_keys = [q[0] for q in get_affected_queries.media_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 1)
        self.assertTrue(
            media_query.EventMediasQuery("2016necmp").cache_key in cache_keys
        )
    def test_robot_updated(self) -> None:
        """Robot changes invalidate the owning teams' robot queries."""
        affected_refs = {
            "team": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
        }
        cache_keys = [q[0] for q in get_affected_queries.robot_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 2)
        self.assertTrue(robot_query.TeamRobotsQuery("frc254").cache_key in cache_keys)
        self.assertTrue(robot_query.TeamRobotsQuery("frc604").cache_key in cache_keys)
    def test_team_updated(self) -> None:
        """Team changes invalidate team, team-list pages, and the district/
        event team queries the team participates in (via seeded links)."""
        affected_refs = {
            "key": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
        }
        cache_keys = [q[0] for q in get_affected_queries.team_updated(affected_refs)]
        self.assertEqual(len(cache_keys), 16)
        self.assertTrue(team_query.TeamQuery("frc254").cache_key in cache_keys)
        self.assertTrue(team_query.TeamQuery("frc604").cache_key in cache_keys)
        # Team-list pages: 254 -> page 0, 604 -> page 1.
        self.assertTrue(team_query.TeamListQuery(0).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListQuery(1).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2015, 0).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2015, 1).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2010, 1).cache_key in cache_keys)
        self.assertTrue(
            team_query.DistrictTeamsQuery("2015fim").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.DistrictTeamsQuery("2015mar").cache_key in cache_keys
        )
        self.assertTrue(team_query.DistrictTeamsQuery("2016ne").cache_key in cache_keys)
        self.assertTrue(team_query.EventTeamsQuery("2015casj").cache_key in cache_keys)
        self.assertTrue(team_query.EventTeamsQuery("2015cama").cache_key in cache_keys)
        self.assertTrue(team_query.EventTeamsQuery("2010cama").cache_key in cache_keys)
        self.assertTrue(
            team_query.EventEventTeamsQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.EventEventTeamsQuery("2015cama").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.EventEventTeamsQuery("2010cama").cache_key in cache_keys
        )
    def test_eventteam_updated(self) -> None:
        """EventTeam changes invalidate team-event participation, team-list-by-
        year pages, event team queries, and the event's media queries."""
        affected_refs = {
            "event": {ndb.Key(Event, "2015casj"), ndb.Key(Event, "2015cama")},
            "team": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
            "year": {2014, 2015},
        }
        cache_keys = [
            q[0] for q in get_affected_queries.eventteam_updated(affected_refs)
        ]
        self.assertEqual(len(cache_keys), 24)
        self.assertTrue(event_query.TeamEventsQuery("frc254").cache_key in cache_keys)
        self.assertTrue(event_query.TeamEventsQuery("frc604").cache_key in cache_keys)
        self.assertTrue(
            team_query.TeamParticipationQuery("frc254").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.TeamParticipationQuery("frc604").cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc254", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc604", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc254", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc254", 2015).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc604", 2014).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc604", 2015).cache_key in cache_keys
        )
        self.assertTrue(team_query.TeamListYearQuery(2014, 0).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2014, 1).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2015, 0).cache_key in cache_keys)
        self.assertTrue(team_query.TeamListYearQuery(2015, 1).cache_key in cache_keys)
        self.assertTrue(team_query.EventTeamsQuery("2015casj").cache_key in cache_keys)
        self.assertTrue(team_query.EventTeamsQuery("2015cama").cache_key in cache_keys)
        self.assertTrue(
            team_query.EventEventTeamsQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.EventEventTeamsQuery("2015cama").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsMediasQuery("2015cama").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsMediasQuery("2015casj").cache_key in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsPreferredMediasQuery("2015cama").cache_key
            in cache_keys
        )
        self.assertTrue(
            media_query.EventTeamsPreferredMediasQuery("2015casj").cache_key
            in cache_keys
        )
    def test_districtteam_updated(self) -> None:
        """DistrictTeam changes invalidate district team lists and the teams'
        district-membership queries."""
        affected_refs = {
            "district_key": {
                ndb.Key(District, "2015fim"),
                ndb.Key(District, "2015mar"),
            },
            "team": {ndb.Key(Team, "frc254"), ndb.Key(Team, "frc604")},
        }
        cache_keys = [
            q[0] for q in get_affected_queries.districtteam_updated(affected_refs)
        ]
        self.assertEqual(len(cache_keys), 4)
        self.assertTrue(
            team_query.DistrictTeamsQuery("2015fim").cache_key in cache_keys
        )
        self.assertTrue(
            team_query.DistrictTeamsQuery("2015mar").cache_key in cache_keys
        )
        self.assertTrue(
            district_query.TeamDistrictsQuery("frc254").cache_key in cache_keys
        )
        self.assertTrue(
            district_query.TeamDistrictsQuery("frc604").cache_key in cache_keys
        )
    def test_district_updated(self) -> None:
        """District changes invalidate district queries plus event queries,
        since APIv3 Event models embed the District model."""
        affected_refs = {
            "key": {ndb.Key(District, "2016ne")},
            "year": {2015, 2016},
            "abbreviation": {"ne", "chs"},
        }
        cache_keys = [
            q[0] for q in get_affected_queries.district_updated(affected_refs)
        ]
        self.assertEqual(len(cache_keys), 13)
        self.assertTrue(
            district_query.DistrictsInYearQuery(2015).cache_key in cache_keys
        )
        self.assertTrue(
            district_query.DistrictsInYearQuery(2016).cache_key in cache_keys
        )
        self.assertTrue(
            district_query.DistrictHistoryQuery("ne").cache_key in cache_keys
        )
        self.assertTrue(
            district_query.DistrictHistoryQuery("chs").cache_key in cache_keys
        )
        self.assertTrue(district_query.DistrictQuery("2016ne").cache_key in cache_keys)
        self.assertTrue(
            district_query.TeamDistrictsQuery("frc604").cache_key in cache_keys
        )
        # Necessary because APIv3 Event models include the District model
        self.assertTrue(event_query.EventQuery("2016necmp").cache_key in cache_keys)
        self.assertTrue(event_query.EventListQuery(2016).cache_key in cache_keys)
        self.assertTrue(
            event_query.DistrictEventsQuery("2016ne").cache_key in cache_keys
        )
        self.assertTrue(event_query.TeamEventsQuery("frc125").cache_key in cache_keys)
        self.assertTrue(
            event_query.TeamYearEventsQuery("frc125", 2016).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.TeamYearEventTeamsQuery("frc125", 2016).cache_key in cache_keys
        )
        self.assertTrue(
            event_query.EventDivisionsQuery("2016necmp").cache_key in cache_keys
        )
| mit | 0d639ecbf6ba851368d08855b760033e | 38.334539 | 100 | 0.603669 | 3.68553 | false | false | false | false |
import json
import re

from datafeeds.parser_base import ParserInputException, ParserBase


class JSONAllianceSelectionsParser(ParserBase):
    @classmethod
    def parse(cls, alliances_json):
        """
        Parse JSON that contains team_keys
        Format is as follows:
        [[captain1, pick1-1, pick1-2(, ...)],
        ['frc254', 'frc971', 'frc604'],
        ...
        [captain8, pick8-1, pick8-2(, ...)]]

        Raises ParserInputException if the JSON is malformed or a team key
        does not match 'frcXXX'.
        """
        try:
            alliances = json.loads(alliances_json)
        except (ValueError, TypeError):
            # json.loads raises ValueError on malformed JSON (and TypeError on
            # non-string input). Catching these narrowly, instead of the old
            # bare `except:`, avoids masking unrelated programming errors.
            raise ParserInputException("Invalid JSON. Please check input.")

        alliance_selections = []
        for alliance in alliances:
            is_empty = True
            selection = {'picks': [], 'declines': []}
            for team_key in alliance:
                if not re.match(r'frc\d+', str(team_key)):
                    raise ParserInputException("Bad team_key: '{}'. Must follow format: 'frcXXX'".format(team_key))
                else:
                    selection['picks'].append(team_key)
                    is_empty = False
            # Skip empty alliance rows entirely rather than emitting an
            # alliance with no picks.
            if not is_empty:
                alliance_selections.append(selection)
        return alliance_selections
import unittest2
from google.appengine.ext import ndb
from google.appengine.ext import testbed
from helpers.event_simulator import EventSimulator
from helpers.match_helper import MatchHelper
from models.event import Event
class TestEventSimulator(unittest2.TestCase):
    """Steps an EventSimulator through a full 2016nytr event (quals, alliance
    selection, quarterfinals, semifinals with a backup team swap, finals) and
    checks the stored Event/match state after every step.

    Two variants are exercised: incremental playoff schedule updates (default)
    and `batch_advance=True`, where each playoff round's schedule appears only
    once the previous round is complete.
    """

    def setUp(self):
        # Stand up in-memory App Engine service stubs so Event/Match writes
        # performed by the simulator stay local to the test.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_datastore_v3_stub()
        self.testbed.init_memcache_stub()
        ndb.get_context().clear_cache()  # Prevent data from leaking between tests

        self.testbed.init_taskqueue_stub(root_path=".")

        # Expected alliance selection data for 2016nytr, before and after
        # frc1665 is brought in as a backup for frc229 on Alliance 2.
        self._alliance_selections = [{u'declines': [], u'backup': None, u'name': u'Alliance 1', u'picks': [u'frc359', u'frc3990', u'frc4508']}, {u'declines': [], u'backup': None, u'name': u'Alliance 2', u'picks': [u'frc5254', u'frc20', u'frc229']}, {u'declines': [], u'backup': None, u'name': u'Alliance 3', u'picks': [u'frc5236', u'frc2791', u'frc3624']}, {u'declines': [], u'backup': None, u'name': u'Alliance 4', u'picks': [u'frc3419', u'frc5240', u'frc663']}, {u'declines': [], u'backup': None, u'name': u'Alliance 5', u'picks': [u'frc48', u'frc1493', u'frc1551']}, {u'declines': [], u'backup': None, u'name': u'Alliance 6', u'picks': [u'frc250', u'frc333', u'frc145']}, {u'declines': [], u'backup': None, u'name': u'Alliance 7', u'picks': [u'frc358', u'frc3003', u'frc527']}, {u'declines': [], u'backup': None, u'name': u'Alliance 8', u'picks': [u'frc4930', u'frc3044', u'frc4481']}]
        self._alliance_selections_with_backup = [{u'declines': [], u'backup': None, u'name': u'Alliance 1', u'picks': [u'frc359', u'frc3990', u'frc4508']}, {u'declines': [], u'backup': {u'in': u'frc1665', u'out': u'frc229'}, u'name': u'Alliance 2', u'picks': [u'frc5254', u'frc20', u'frc229']}, {u'declines': [], u'backup': None, u'name': u'Alliance 3', u'picks': [u'frc5236', u'frc2791', u'frc3624']}, {u'declines': [], u'backup': None, u'name': u'Alliance 4', u'picks': [u'frc3419', u'frc5240', u'frc663']}, {u'declines': [], u'backup': None, u'name': u'Alliance 5', u'picks': [u'frc48', u'frc1493', u'frc1551']}, {u'declines': [], u'backup': None, u'name': u'Alliance 6', u'picks': [u'frc250', u'frc333', u'frc145']}, {u'declines': [], u'backup': None, u'name': u'Alliance 7', u'picks': [u'frc358', u'frc3003', u'frc527']}, {u'declines': [], u'backup': None, u'name': u'Alliance 8', u'picks': [u'frc4930', u'frc3044', u'frc4481']}]

    def tearDown(self):
        self.testbed.deactivate()

    def test_event_smulator(self):
        # (sic: "smulator" typo kept so the test id stays stable.)
        """Default mode: playoff schedules/tiebreakers appear incrementally."""
        es = EventSimulator()

        # Before anything has happened
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details, None)
        self.assertEqual(event.matches, [])

        # Qual match schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertNotEqual(event.details, None)
        for rank in event.details.rankings2:
            self.assertEqual(rank['sort_orders'][0], 0)
        self.assertEqual(len(event.matches), 72)
        for match in event.matches:
            self.assertEqual(match.comp_level, 'qm')
            self.assertFalse(match.has_been_played)
            self.assertEqual(match.actual_time, None)

        # After each qual match
        for i in xrange(72):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, None)
            self.assertEqual(len(event.matches), 72)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # Check some final rankings
        self.assertEqual(event.details.rankings2[0]['sort_orders'][0], 22)
        self.assertEqual(event.details.rankings2[-1]['sort_orders'][0], 4)

        # After alliance selections
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 72)

        # QF schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 84)
        for match in event.matches:
            if match.comp_level == 'qm':
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'qf')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each QF match
        # Match-count bookkeeping: as series end early, unneeded tiebreakers
        # are removed while SF matches are added (1 removed, 3 added).
        for i in xrange(72, 82):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, self._alliance_selections)
            if i <= 75:
                self.assertEqual(len(event.matches), 84)
            elif i <= 77:
                self.assertEqual(len(event.matches), 86)  # 1 match removed, 3 added
            else:
                self.assertEqual(len(event.matches), 88)  # 1 match removed, 3 added
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if match.key.id() in {'2016nytr_qf1m3', '2016nytr_qf3m3'}:
                    # Unneeded tiebreak matches
                    self.assertFalse(match.has_been_played)
                elif j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # Check SF Matches
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 88)
        for match in event.matches:
            if match.comp_level in {'qm', 'qf'}:
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'sf')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each SF match
        # The backup swap (frc1665 in for frc229) lands at step index 85.
        for i in xrange(82, 87):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            if i < 85:
                self.assertEqual(event.details.alliance_selections, self._alliance_selections)
            else:
                self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
            if i <= 83:
                self.assertEqual(len(event.matches), 88)
            else:
                self.assertEqual(len(event.matches), 90)  # 1 match removed, 3 added
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if match.key.id() == '2016nytr_sf1m3':
                    # Unneeded tiebreak matches
                    self.assertFalse(match.has_been_played)
                elif j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # Check F Matches
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
        self.assertEqual(len(event.matches), 90)
        for match in event.matches:
            if match.comp_level in {'qm', 'qf', 'sf'}:
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'f')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each F match
        for i in xrange(87, 90):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
            self.assertEqual(len(event.matches), 90)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

    def test_event_smulator_batch_advance(self):
        """batch_advance mode: each playoff round's schedule only appears after
        the previous round completes, so unneeded tiebreakers are dropped
        without the next round's matches being added yet (counts shrink)."""
        es = EventSimulator(batch_advance=True)

        # Before anything has happened
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details, None)
        self.assertEqual(event.matches, [])

        # Qual match schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertNotEqual(event.details, None)
        for rank in event.details.rankings2:
            self.assertEqual(rank['sort_orders'][0], 0)
        self.assertEqual(len(event.matches), 72)
        for match in event.matches:
            self.assertEqual(match.comp_level, 'qm')
            self.assertFalse(match.has_been_played)
            self.assertEqual(match.actual_time, None)

        # After each qual match
        for i in xrange(72):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, None)
            self.assertEqual(len(event.matches), 72)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # Check some final rankings
        self.assertEqual(event.details.rankings2[0]['sort_orders'][0], 22)
        self.assertEqual(event.details.rankings2[-1]['sort_orders'][0], 4)

        # After alliance selections
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 72)

        # QF schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 84)
        for match in event.matches:
            if match.comp_level == 'qm':
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'qf')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each QF match
        # In batch mode, tiebreakers are removed as series end but SF matches
        # are not added yet, so the total match count decreases.
        for i in xrange(72, 82):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, self._alliance_selections)
            if i <= 75:
                self.assertEqual(len(event.matches), 84)
            elif i <= 77:
                self.assertEqual(len(event.matches), 83)
            else:
                self.assertEqual(len(event.matches), 82)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if match.key.id() in {'2016nytr_qf1m3', '2016nytr_qf3m3'}:
                    # Unneeded tiebreak matches
                    self.assertFalse(match.has_been_played)
                elif j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # SF schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections)
        self.assertEqual(len(event.matches), 88)
        for match in event.matches:
            if match.comp_level in {'qm', 'qf'}:
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'sf')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each SF match
        for i in xrange(82, 87):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            if i < 85:
                self.assertEqual(event.details.alliance_selections, self._alliance_selections)
            else:
                self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
            if i <= 83:
                self.assertEqual(len(event.matches), 88)
            else:
                self.assertEqual(len(event.matches), 87)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if match.key.id() == '2016nytr_sf1m3':
                    # Unneeded tiebreak matches
                    self.assertFalse(match.has_been_played)
                elif j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)

        # F schedule added
        es.step()
        event = Event.get_by_id('2016nytr')
        self.assertNotEqual(event, None)
        self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
        self.assertEqual(len(event.matches), 90)
        for match in event.matches:
            if match.comp_level in {'qm', 'qf', 'sf'}:
                self.assertTrue(match.has_been_played)
                self.assertNotEqual(match.actual_time, None)
            else:
                self.assertEqual(match.comp_level, 'f')
                self.assertFalse(match.has_been_played)
                self.assertEqual(match.actual_time, None)

        # After each F match
        for i in xrange(87, 90):
            es.step()
            event = Event.get_by_id('2016nytr')
            self.assertNotEqual(event, None)
            self.assertEqual(event.details.alliance_selections, self._alliance_selections_with_backup)
            self.assertEqual(len(event.matches), 90)
            matches = MatchHelper.play_order_sorted_matches(event.matches)
            for j, match in enumerate(matches):
                if j <= i:
                    self.assertTrue(match.has_been_played)
                    self.assertNotEqual(match.actual_time, None)
                else:
                    self.assertFalse(match.has_been_played)
| mit | 2c3a8fd09d29dd386bf0b7fb859e07d2 | 45.20977 | 934 | 0.58168 | 3.726767 | false | false | false | false |
import datetime
import pytest
from backend.common.consts.event_type import EventType
from backend.common.helpers.offseason_event_helper import OffseasonEventHelper
from backend.common.models.event import Event
@pytest.fixture(autouse=True)
def auto_add_ndb_stub(ndb_stub):
    # Apply the ndb_stub fixture to every test in this module automatically,
    # so datastore reads/writes hit the in-memory stub.
    yield
def test_is_direct_match_key_name():
    """Same-year events with identical event_shorts match directly; different
    event_shorts do not."""
    zor_a = Event(year=2020, event_short="zor")
    zor_b = Event(year=2020, event_short="zor")
    iri = Event(year=2020, event_short="iri")

    assert OffseasonEventHelper.is_direct_match(zor_a, zor_b)
    assert not OffseasonEventHelper.is_direct_match(zor_a, iri)
def test_is_direct_match_key_name_with_first_code():
    """When a TBA event has a first_code, that code (not its event_short) is
    compared against the FIRST event's event_short."""
    tba_zor = Event(year=2020, first_code="zor", event_short="zorr")
    tba_iri = Event(year=2020, first_code="iri", event_short="irii")
    first_zor = Event(year=2020, event_short="zor")

    assert OffseasonEventHelper.is_direct_match(tba_zor, first_zor)
    assert not OffseasonEventHelper.is_direct_match(tba_iri, first_zor)
def test_is_maybe_match():
    """Two events with identical dates, city, and state are a possible match."""
    def make_event():
        return Event(
            start_date=datetime.datetime(2020, 7, 14, 0, 0, 0),
            end_date=datetime.datetime(2020, 7, 15, 23, 59, 59),
            city="London",
            state_prov="OH",
        )

    assert OffseasonEventHelper.is_maybe_match(make_event(), make_event())
def test_is_maybe_match_wrong_start():
    """Differing start dates rule out a match until they agree."""
    def make_event(start_day):
        return Event(
            start_date=datetime.datetime(2020, 7, start_day, 0, 0, 0),
            end_date=datetime.datetime(2020, 7, 15, 23, 59, 59),
            city="London",
            state_prov="OH",
        )

    event_one = make_event(14)
    event_two = make_event(13)
    assert not OffseasonEventHelper.is_maybe_match(event_one, event_two)

    # Align the start dates - now the events should be a possible match.
    event_two.start_date = event_one.start_date
    assert OffseasonEventHelper.is_maybe_match(event_one, event_two)
def test_is_maybe_match_wrong_end():
    """Differing end dates rule out a match until they agree."""
    def make_event(end_day):
        return Event(
            start_date=datetime.datetime(2020, 7, 14, 0, 0, 0),
            end_date=datetime.datetime(2020, 7, end_day, 23, 59, 59),
            city="London",
            state_prov="OH",
        )

    event_one = make_event(15)
    event_two = make_event(16)
    assert not OffseasonEventHelper.is_maybe_match(event_one, event_two)

    # Align the end dates - now the events should be a possible match.
    event_two.end_date = event_one.end_date
    assert OffseasonEventHelper.is_maybe_match(event_one, event_two)
def test_is_maybe_match_wrong_city():
    """Differing cities rule out a match until they agree."""
    def make_event(city):
        return Event(
            start_date=datetime.datetime(2020, 7, 14, 0, 0, 0),
            end_date=datetime.datetime(2020, 7, 15, 23, 59, 59),
            city=city,
            state_prov="OH",
        )

    event_one = make_event("London")
    event_two = make_event("Sandusky")
    assert not OffseasonEventHelper.is_maybe_match(event_one, event_two)

    # Align the cities - now the events should be a possible match.
    event_two.city = event_one.city
    assert OffseasonEventHelper.is_maybe_match(event_one, event_two)
def test_is_maybe_match_wrong_state_prov():
    """Differing states rule out a match until they agree."""
    def make_event(state_prov):
        return Event(
            start_date=datetime.datetime(2020, 7, 14, 0, 0, 0),
            end_date=datetime.datetime(2020, 7, 15, 23, 59, 59),
            city="London",
            state_prov=state_prov,
        )

    event_one = make_event("OH")
    event_two = make_event("CA")
    assert not OffseasonEventHelper.is_maybe_match(event_one, event_two)

    # Align the states - now the events should be a possible match.
    event_two.state_prov = event_one.state_prov
    assert OffseasonEventHelper.is_maybe_match(event_one, event_two)
def test_categorize_offseasons():
    """categorize_offseasons splits FIRST events into (existing TBA event,
    FIRST event) pairs - matched directly via first_code or indirectly via
    date/location - and brand-new events."""
    # Setup some existing TBA events in the database - these will be queried for our test
    preseason_event = Event(
        id="2016mipre",
        name="Michigan Preseason Event",
        event_type_enum=EventType.PRESEASON,
        short_name="MI Preseason",
        event_short="mipre",
        first_code="mierp",
        year=2016,
        end_date=datetime.datetime(2016, 2, 25),
        official=False,
        city="Anytown",
        state_prov="MI",
        country="USA",
        venue="Some Venue",
        venue_address="Some Venue, Anytown, MI, USA",
        timezone_id="America/New_York",
        start_date=datetime.datetime(2016, 2, 24),
        webcast_json="",
        website=None,
    )
    preseason_event.put()
    offseason_event = Event(
        id="2016mioff",
        name="Michigan Offseason Event",
        event_type_enum=EventType.OFFSEASON,
        short_name="MI Offseason",
        event_short="mioff",
        year=2016,
        end_date=datetime.datetime(2016, 6, 25),
        official=False,
        city="Anytown",
        state_prov="MI",
        country="USA",
        venue="Some Venue",
        venue_address="Some Venue, Anytown, MI, USA",
        timezone_id="America/New_York",
        start_date=datetime.datetime(2016, 6, 24),
        webcast_json="",
        website=None,
    )
    offseason_event.put()

    # Exact match
    first_preseason = Event(year=2016, event_short="mierp")
    # Indirect match
    first_offseason = Event(
        year=2016,
        event_short="miffo",
        start_date=datetime.datetime(2016, 6, 24),
        end_date=datetime.datetime(2016, 6, 25),
        city="Anytown",
        state_prov="MI",
    )
    first_new_event = Event(year=2016, event_short="minew")
    first_events = [first_preseason, first_offseason, first_new_event]

    existing, new = OffseasonEventHelper.categorize_offseasons(2016, first_events)
    # Should have two existing events
    assert len(existing) == 2
    assert (preseason_event, first_preseason) in existing
    assert (offseason_event, first_offseason) in existing
    # Should have one new event
    assert len(new) == 1
    assert new == [first_new_event]
def test_categorize_offseasons_no_events():
    """With no TBA events stored, every FIRST event is categorized as new."""
    first_preseason = Event(year=2016, event_short="mierp")
    first_offseason = Event(
        year=2016,
        event_short="miffo",
        start_date=datetime.datetime(2016, 6, 24),
        end_date=datetime.datetime(2016, 6, 25),
        city="Anytown",
        state_prov="MI",
    )
    first_new_event = Event(year=2016, event_short="minew")
    first_events = [first_preseason, first_offseason, first_new_event]

    existing, new = OffseasonEventHelper.categorize_offseasons(2016, first_events)
    # Should have no existing events
    assert len(existing) == 0
    assert len(new) == 3
| mit | df7d248f3ca94d4136dd92b43c9f0479 | 31.481928 | 89 | 0.612636 | 3.328395 | false | false | false | false |
import datetime
import os
import threading
import time

import Quartz

from ._mouse_event import ButtonEvent, WheelEvent, MoveEvent, LEFT, RIGHT, MIDDLE, X, X2, UP, DOWN
# Maps each button constant to its Quartz codes:
# (button number, down event type, up event type, drag event type).
_button_mapping = {
    LEFT: (Quartz.kCGMouseButtonLeft, Quartz.kCGEventLeftMouseDown, Quartz.kCGEventLeftMouseUp, Quartz.kCGEventLeftMouseDragged),
    RIGHT: (Quartz.kCGMouseButtonRight, Quartz.kCGEventRightMouseDown, Quartz.kCGEventRightMouseUp, Quartz.kCGEventRightMouseDragged),
    MIDDLE: (Quartz.kCGMouseButtonCenter, Quartz.kCGEventOtherMouseDown, Quartz.kCGEventOtherMouseUp, Quartz.kCGEventOtherMouseDragged)
}
# Which buttons are currently held down; move_to() consults this to decide
# whether to emit a drag event instead of a plain move.
_button_state = {
    LEFT: False,
    RIGHT: False,
    MIDDLE: False
}
# State of the most recent click, used by press()/release() to synthesize
# double/triple clicks (click_count is capped at 3 in press()).
_last_click = {
    "time": None,
    "button": None,
    "position": None,
    "click_count": 0
}
class MouseEventListener(object):
    """ Listens for mouse events via a Quartz event tap and forwards them to
    `callback` as ButtonEvent/WheelEvent/MoveEvent objects. When `blocking`
    is True, tapped events are swallowed (handler returns None) and the
    callback is not invoked, mirroring the original behavior. """

    def __init__(self, callback, blocking=False):
        self.blocking = blocking
        self.callback = callback
        self.listening = True

    def run(self):
        """ Creates a listener and loops while waiting for an event. Intended to run as
        a background thread. """
        self.tap = Quartz.CGEventTapCreate(
            Quartz.kCGSessionEventTap,
            Quartz.kCGHeadInsertEventTap,
            Quartz.kCGEventTapOptionDefault,
            Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseDown) |
            Quartz.CGEventMaskBit(Quartz.kCGEventLeftMouseUp) |
            Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseDown) |
            Quartz.CGEventMaskBit(Quartz.kCGEventRightMouseUp) |
            Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseDown) |
            Quartz.CGEventMaskBit(Quartz.kCGEventOtherMouseUp) |
            Quartz.CGEventMaskBit(Quartz.kCGEventMouseMoved) |
            Quartz.CGEventMaskBit(Quartz.kCGEventScrollWheel),
            self.handler,
            None)
        loopsource = Quartz.CFMachPortCreateRunLoopSource(None, self.tap, 0)
        loop = Quartz.CFRunLoopGetCurrent()
        Quartz.CFRunLoopAddSource(loop, loopsource, Quartz.kCFRunLoopDefaultMode)
        Quartz.CGEventTapEnable(self.tap, True)

        while self.listening:
            Quartz.CFRunLoopRunInMode(Quartz.kCFRunLoopDefaultMode, 5, False)

    def handler(self, proxy, e_type, event, refcon):
        """ Translates a Quartz mouse event into a _mouse_event object and
        passes it to the callback.

        Bug fix: the previous implementation was keyboard-listener code pasted
        into this module (it referenced name_from_scancode, KeyboardEvent and
        KEY_UP, none of which exist here) and raised NameError on the first
        tapped event. It now builds the mouse event types this module imports.
        """
        when = time.time()
        mouse_event = None
        if e_type == Quartz.kCGEventMouseMoved:
            location = Quartz.CGEventGetLocation(event)
            mouse_event = MoveEvent(location.x, location.y, when)
        elif e_type == Quartz.kCGEventScrollWheel:
            delta = Quartz.CGEventGetIntegerValueField(
                event, Quartz.kCGScrollWheelEventDeltaAxis1)
            mouse_event = WheelEvent(delta, when)
        else:
            buttons_down = {
                Quartz.kCGEventLeftMouseDown: LEFT,
                Quartz.kCGEventRightMouseDown: RIGHT,
                Quartz.kCGEventOtherMouseDown: MIDDLE,
            }
            buttons_up = {
                Quartz.kCGEventLeftMouseUp: LEFT,
                Quartz.kCGEventRightMouseUp: RIGHT,
                Quartz.kCGEventOtherMouseUp: MIDDLE,
            }
            if e_type in buttons_down:
                mouse_event = ButtonEvent(DOWN, buttons_down[e_type], when)
            elif e_type in buttons_up:
                mouse_event = ButtonEvent(UP, buttons_up[e_type], when)

        if self.blocking:
            # Preserve original semantics: swallow the event and skip the
            # callback when blocking.
            return None
        if mouse_event is not None:
            self.callback(mouse_event)
        return event
# Exports
def init():
    """ Initializes mouse state. Nothing to do on Darwin; provided so this
    backend matches the platform-module interface. """
def listen(queue):
    """ Appends events to the queue (ButtonEvent, WheelEvent, and MoveEvent).

    Requires root (event taps need elevated privileges); raises OSError
    otherwise. The listener runs on a daemon thread.

    Bug fix: the previous callback also evaluated
    `is_allowed(e.name, e.event_type == KEY_UP)`, copied from the keyboard
    listener; neither `is_allowed` nor `KEY_UP` exists in this module and
    mouse events have no `name`, so it raised NameError on the first event.
    """
    if not os.geteuid() == 0:
        raise OSError("Error 13 - Must be run as administrator")
    listener = MouseEventListener(lambda e: queue.put(e))
    t = threading.Thread(target=listener.run, args=())
    t.daemon = True
    t.start()
def press(button=LEFT):
    """ Sends a down event for the specified button, using the provided constants """
    location = get_position()
    button_code, button_down, _, _ = _button_mapping[button]
    e = Quartz.CGEventCreateMouseEvent(
        None,
        button_down,
        location,
        button_code)
    # Check if this is a double-click (same location within the last 300ms)
    if _last_click["time"] is not None and datetime.datetime.now() - _last_click["time"] < datetime.timedelta(seconds=0.3) and _last_click["button"] == button and _last_click["position"] == location:
        # Repeated Click
        _last_click["click_count"] = min(3, _last_click["click_count"]+1)
    else:
        # Not a double-click - Reset last click
        _last_click["click_count"] = 1
    # Tag the event with the click count (1=single, 2=double, 3=triple,
    # capped at 3 above) so macOS interprets multi-clicks correctly.
    Quartz.CGEventSetIntegerValueField(
        e,
        Quartz.kCGMouseEventClickState,
        _last_click["click_count"])
    Quartz.CGEventPost(Quartz.kCGHIDEventTap, e)
    # Record state so release() can tag the matching up event and subsequent
    # press() calls can detect repeated clicks at the same position.
    _button_state[button] = True
    _last_click["time"] = datetime.datetime.now()
    _last_click["button"] = button
    _last_click["position"] = location
def release(button=LEFT):
    """ Sends an up event for the specified button, using the provided constants """
    location = get_position()
    button_code, _, button_up, _ = _button_mapping[button]
    e = Quartz.CGEventCreateMouseEvent(
        None,
        button_up,
        location,
        button_code)
    # 300000 microseconds is the same 0.3s window press() uses; if this
    # release belongs to a multi-click at the same spot, tag it with the same
    # click count that the matching press carried.
    if _last_click["time"] is not None and _last_click["time"] > datetime.datetime.now() - datetime.timedelta(microseconds=300000) and _last_click["button"] == button and _last_click["position"] == location:
        # Repeated Click
        Quartz.CGEventSetIntegerValueField(
            e,
            Quartz.kCGMouseEventClickState,
            _last_click["click_count"])
    Quartz.CGEventPost(Quartz.kCGHIDEventTap, e)
    _button_state[button] = False
def wheel(delta=1):
    """ Sends a wheel event for the provided number of clicks. May be negative to reverse
    direction. """
    location = get_position()
    # NOTE(review): two events are posted - a generic mouse event carrying the
    # scroll-wheel type, then a dedicated scroll-wheel event with the delta.
    # Verify whether the first post is actually required.
    e = Quartz.CGEventCreateMouseEvent(
        None,
        Quartz.kCGEventScrollWheel,
        location,
        Quartz.kCGMouseButtonLeft)
    e2 = Quartz.CGEventCreateScrollWheelEvent(
        None,
        Quartz.kCGScrollEventUnitLine,
        1,  # wheelCount: number of wheel axes carried by this event
        delta)
    Quartz.CGEventPost(Quartz.kCGHIDEventTap, e)
    Quartz.CGEventPost(Quartz.kCGHIDEventTap, e2)
def move_to(x, y):
    """ Sets the mouse's location to the specified coordinates. """
    # If any button is currently held, emit that button's drag event type so
    # an in-progress drag isn't broken by a plain "moved" event.
    for b in _button_state:
        if _button_state[b]:
            e = Quartz.CGEventCreateMouseEvent(
                None,
                _button_mapping[b][3],  # Drag Event
                (x, y),
                _button_mapping[b][0])
            break
    else:
        # No buttons held: plain move (the button constant is still required
        # by the CGEventCreateMouseEvent signature).
        e = Quartz.CGEventCreateMouseEvent(
            None,
            Quartz.kCGEventMouseMoved,
            (x, y),
            Quartz.kCGMouseButtonLeft)
    Quartz.CGEventPost(Quartz.kCGHIDEventTap, e)
def get_position():
    """ Returns the mouse's location as a tuple of (x, y).

    Fix: removed dataset-extraction residue (`| mit | ... |`) that had been
    fused onto the return line, which made the line invalid Python.
    """
    e = Quartz.CGEventCreate(None)
    point = Quartz.CGEventGetLocation(e)
    return (point.x, point.y)
"""A method is a Python function that can be called by a JSON-RPC request.

They're held in a dict, a mapping of function names to functions.

The @method decorator adds a method to jsonrpcserver's internal global_methods dict.
Alternatively pass your own dictionary of methods to `dispatch` with the methods param.

>>> dispatch(request)  # Uses the internal collection of funcs added with @method
>>> dispatch(request, methods={"ping": lambda: "pong"})  # Custom collection

Methods can take either positional or named arguments, but not both. This is a
limitation of JSON-RPC.
"""
from typing import Any, Callable, Dict, Optional, cast
from .result import Result
# A JSON-RPC method is any callable returning a Result.
Method = Callable[..., Result]
# Mapping of method name -> callable, the shape dispatch() expects.
Methods = Dict[str, Method]
# Default registry populated by the @method decorator below.
global_methods = dict()
def method(
    f: Optional[Method] = None, name: Optional[str] = None
) -> Callable[..., Any]:
    """A decorator to add a function into jsonrpcserver's internal global_methods dict.

    The global_methods dict will be used by default unless a methods argument is passed
    to `dispatch`.

    Functions can be renamed by passing a name argument:

        @method(name="bar")
        def foo():
            ...

    Args:
        f: The function to register when used as a bare decorator (@method).
        name: Optional registration name; defaults to the function's __name__.

    Returns:
        The function itself (bare use), or a decorator (parameterized use).

    Fix: removed dataset-extraction residue (`| mit | ... |`) fused onto the
    return line, which made the line invalid Python.
    """

    def decorator(func: Method) -> Method:
        nonlocal name
        global_methods[name or func.__name__] = func
        return func

    # Bare use: f is the decorated function. Parameterized use: f is None,
    # so return the decorator for the subsequent call.
    return decorator(f) if callable(f) else cast(Method, decorator)
#
# Copyright (c), 2018-2021, SISSA (International School for Advanced Studies).
# All rights reserved.
# This file is distributed under the terms of the MIT License.
# See the file 'LICENSE' in the root directory of the present
# distribution, or http://opensource.org/licenses/MIT.
#
# @author Davide Brunato <brunato@sissa.it>
#
# type: ignore
"""
XPath 3.1 implementation - part 3 (functions)
"""
from ..datatypes import AnyAtomicType
from ..xpath_token import XPathMap, XPathArray
from ._xpath31_operators import XPath31Parser
# Shortcuts for registering tokens on the XPath 3.1 parser class.
method = XPath31Parser.method
function = XPath31Parser.function

# Drop the inherited 'string-join' token; it is re-registered just below
# with the two-argument (optional separator) signature.
XPath31Parser.unregister('string-join')
@method(function('string-join', nargs=(1, 2),
                 sequence_types=('xs:anyAtomicType*', 'xs:string', 'xs:string')))
def evaluate_string_join_function(self, context=None):
    # fn:string-join - concatenate the string values of the first argument's
    # items. With one argument the separator is the empty string; otherwise
    # the second argument supplies the separator.
    items = [self.string_value(s) for s in self[0].select(context)]
    if len(self) == 1:
        return ''.join(items)
    return self.get_argument(context, 1, required=True, cls=str).join(items)
@method(function('size', prefix='map', label='map function', nargs=1,
                 sequence_types=('map(*)', 'xs:integer')))
def evaluate_map_size_function(self, context=None):
    # map:size - number of entries in the map.
    return len(self.get_argument(context, required=True, cls=XPathMap))
@method(function('keys', prefix='map', label='map function', nargs=1,
                 sequence_types=('map(*)', 'xs:anyAtomicType*')))
def evaluate_map_keys_function(self, context=None):
    # map:keys - sequence of the keys present in the map.
    map_ = self.get_argument(context, required=True, cls=XPathMap)
    return map_.keys(context)
@method(function('contains', prefix='map', label='map function', nargs=2,
                 sequence_types=('map(*)', 'xs:anyAtomicType', 'xs:boolean')))
def evaluate_map_contains_function(self, context=None):
    # map:contains - True if the map has an entry for the given key.
    map_ = self.get_argument(context, required=True, cls=XPathMap)
    key = self.get_argument(context, index=1, required=True, cls=AnyAtomicType)
    return map_.contains(context, key)
@method(function('get', prefix='map', label='map function', nargs=2,
                 sequence_types=('map(*)', 'xs:anyAtomicType', 'item()*')))
def evaluate_map_get_function(self, context=None):
    # map:get - value bound to the key; calling the XPathMap performs the
    # key lookup.
    map_ = self.get_argument(context, required=True, cls=XPathMap)
    key = self.get_argument(context, index=1, required=True, cls=AnyAtomicType)
    return map_(context, key)
@method(function('size', prefix='array', label='array function', nargs=1,
                 sequence_types=('array(*)', 'xs:integer')))
def evaluate_array_size_function(self, context=None):
    # array:size - number of members in the array.
    return len(self.get_argument(context, required=True, cls=XPathArray))
@method(function('get', prefix='array', label='array function', nargs=2,
                 sequence_types=('array(*)', 'xs:integer', 'item()*')))
def evaluate_array_get_function(self, context=None):
    # array:get - member at the given position; calling the XPathArray
    # performs the positional lookup.
    array_ = self.get_argument(context, required=True, cls=XPathArray)
    position = self.get_argument(context, index=1, required=True, cls=int)
    return array_(context, position)
@method(function('put', prefix='array', label='array function', nargs=3,
                 sequence_types=('array(*)', 'xs:integer', 'item()*', 'array(*)')))
def evaluate_array_put_function(self, context=None):
    # array:put - new array with the member at `position` replaced by the
    # third argument's value.
    array_ = self.get_argument(context, required=True, cls=XPathArray)
    position = self.get_argument(context, index=1, required=True, cls=int)
    member = self[2].evaluate(context)
    if member is None:
        # An empty sequence evaluates to None; store it as an empty list member.
        member = []
    return array_.put(position, member, context)
| mit | 7faa353055e33f6ee69fba54b674b32e | 39.395349 | 83 | 0.681059 | 3.477477 | false | false | false | false |
import argparse
import sys
import shutil
import multiprocessing
from os import path, listdir, makedirs
# Make the sibling `setting` package importable regardless of the CWD the
# script is launched from.
ROOT_DIR = path.dirname(path.abspath(__file__))
SETTING_DIR = path.join(ROOT_DIR, '..', 'setting')
sys.path.append(SETTING_DIR)
from index.index import index_a_table
from index.index import Index
def index_all(db_path, index_path, module_search_path, libs_in_xml):
    """Rebuild the whole index: wipe index_path and index every table found
    in db_path, one worker-pool task per table.

    Args:
        db_path: Folder containing scanner result tables (one file each).
        index_path: Output folder; recreated from scratch on every run.
        module_search_path: Extra paths appended to sys.path so libraries can
            be imported while indexing.
        libs_in_xml: Path to libraries provided in XML format.

    Fix: the multiprocessing.Pool was never closed/joined, leaking worker
    processes; the pool is now shut down even if indexing raises.
    """
    for path_ in module_search_path:
        sys.path.append(path_)
    tables = listdir(db_path)
    params = [(db_path, table, index_path, libs_in_xml) for table in tables]
    if path.exists(index_path):
        shutil.rmtree(index_path)
    makedirs(index_path)
    pool = multiprocessing.Pool()
    try:
        pool.map(index_a_table, params)
    finally:
        pool.close()
        pool.join()
def index_single(db_path, db_table, index_path, module_search_path,
libs_in_xml):
for path_ in module_search_path:
sys.path.append(path_)
if not path.exists(index_path):
makedirs(index_path)
index = Index(db_path=db_path, index_path=index_path,
xml_libraries=libs_in_xml)
index.index_consturctor(table=db_table)
if __name__ == '__main__':
c_parser = argparse.ArgumentParser(
description='Indexing Scanner results')
c_parser.add_argument(
'mode',
choices=['all', 'single'],
help='Index mode: all or single'
)
c_parser.add_argument(
'--db_path',
required=True,
help='Folder where Scanner result is read'
)
c_parser.add_argument(
'--db_table',
help='File name, in the db_path folder, where index is created'
)
c_parser.add_argument(
'--index_path',
required=True,
help='Folder where index result is saved'
)
c_parser.add_argument(
'--module_search_path',
nargs='*',
help='List of paths where libraries are searched when indexing')
c_parser.add_argument(
'--path_to_lib_in_xml',
help='Path to libraries in XML format')
args = c_parser.parse_args()
module_search_path = []
if args.module_search_path:
module_search_path = args.module_search_path
if args.mode == 'all':
index_all(
args.db_path,
args.index_path,
module_search_path,
args.path_to_lib_in_xml
)
else:
index_single(
args.db_path,
args.db_table,
args.index_path,
module_search_path,
args.path_to_lib_in_xml
)
| mit | a3867ffd5feb290fd0ab258b7327fe94 | 28.290698 | 72 | 0.578503 | 3.658708 | false | false | false | false |
robbievanleeuwen/section-properties | sectionproperties/pre/pre.py | 1 | 4404 | from typing import Union, List
from dataclasses import dataclass
import numpy as np
import triangle
class GeometryError(Exception):
"""Exception raised when invalid geometry is found."""
pass
@dataclass(eq=True, frozen=True)
class Material:
"""Class for structural materials.
Provides a way of storing material properties related to a specific material. The color can be
a multitude of different formats, refer to https://matplotlib.org/api/colors_api.html and
https://matplotlib.org/examples/color/named_colors.html for more information.
:param string name: Material name
:param float elastic_modulus: Material modulus of elasticity
:param float poissons_ratio: Material Poisson's ratio
:param float yield_strength: Material yield strength
:param float density: Material density (mass per unit volume)
:param color: Material color for rendering
:type color: :class:`matplotlib.colors`
:cvar string name: Material name
:cvar float elastic_modulus: Material modulus of elasticity
:cvar float poissons_ratio: Material Poisson's ratio
:cvar float shear_modulus: Material shear modulus, derived from the elastic modulus and
Poisson's ratio assuming an isotropic material
:cvar float density: Material density (mass per unit volume)
:cvar float yield_strength: Material yield strength
:cvar color: Material color for rendering
:vartype color: :class:`matplotlib.colors`
The following example creates materials for concrete, steel and timber::
from sectionproperties.pre.pre import Material
concrete = Material(
name='Concrete', elastic_modulus=30.1e3, poissons_ratio=0.2, density=2.4e-6,
yield_strength=32, color='lightgrey'
)
steel = Material(
name='Steel', elastic_modulus=200e3, poissons_ratio=0.3, density=7.85e-6,
yield_strength=500, color='grey'
)
timber = Material(
name='Timber', elastic_modulus=8e3, poissons_ratio=0.35, density=6.5e-7,
yield_strength=20, color='burlywood'
)
"""
name: str
elastic_modulus: float
poissons_ratio: float
yield_strength: float
density: float
color: str
@property
def shear_modulus(self):
return self.elastic_modulus / (2 * (1 + self.poissons_ratio))
DEFAULT_MATERIAL = Material("default", 1, 0, 1, 1, "w")
def create_mesh(
points: List[List[float]],
facets: List[List[float]],
holes: List[List[float]],
control_points: List[List[float]],
mesh_sizes: Union[List[float], float],
coarse: bool,
):
"""Creates a quadratic triangular mesh using the triangle module, which utilises the code
'Triangle', by Jonathan Shewchuk.
:param points: List of points *(x, y)* defining the vertices of the cross-section
:type points: list[list[float, float]]
:param facets: List of point index pairs *(p1, p2)* defining the edges of the cross-section
:type points: list[list[int, int]]
:param holes: List of points *(x, y)* defining the locations of holes within the cross-section.
If there are no holes, provide an empty list [].
:type holes: list[list[float, float]]
:param control_points: A list of points *(x, y)* that define different regions of the
cross-section. A control point is an arbitrary point within a region enclosed by facets.
:type control_points: list[list[float, float]]
:param mesh_sizes: List of maximum element areas for each region defined by a control point
:type mesh_sizes: list[float]
:param bool coarse: If set to True, will create a coarse mesh (no area or quality
constraints)
:return: Dictionary containing mesh data
:rtype: dict()
"""
if not isinstance(mesh_sizes, list):
mesh_sizes = [mesh_sizes]
tri = {} # create tri dictionary
tri["vertices"] = points # set point
tri["segments"] = facets # set facets
if holes:
tri["holes"] = holes # set holes
# prepare regions
regions = []
for (i, cp) in enumerate(control_points):
regions.append([cp[0], cp[1], i, mesh_sizes[i]])
tri["regions"] = regions # set regions
# generate mesh
if coarse:
mesh = triangle.triangulate(tri, "pAo2")
else:
mesh = triangle.triangulate(tri, "pq30Aao2")
return mesh
| mit | c82368e4f78b6dc861d7ead6c3fc03b4 | 33.952381 | 99 | 0.673933 | 3.819601 | false | false | false | false |
gae-init/gae-init | main/model/config_auth.py | 3 | 2679 | # coding: utf-8
from __future__ import absolute_import
from google.appengine.ext import ndb
from api import fields
import model
class ConfigAuth(object):
bitbucket_key = ndb.StringProperty(default='', verbose_name='Key')
bitbucket_secret = ndb.StringProperty(default='', verbose_name='Secret')
facebook_app_id = ndb.StringProperty(default='', verbose_name='App ID')
facebook_app_secret = ndb.StringProperty(default='', verbose_name='App Secret')
github_client_id = ndb.StringProperty(default='', verbose_name='Client ID')
github_client_secret = ndb.StringProperty(default='', verbose_name='Client Secret')
google_client_id = ndb.StringProperty(default='', verbose_name='Client ID')
google_client_secret = ndb.StringProperty(default='', verbose_name='Client Secret')
linkedin_api_key = ndb.StringProperty(default='', verbose_name='API Key')
linkedin_secret_key = ndb.StringProperty(default='', verbose_name='Secret Key')
microsoft_client_id = ndb.StringProperty(default='', verbose_name='Client ID')
microsoft_client_secret = ndb.StringProperty(default='', verbose_name='Client Secret')
twitter_consumer_key = ndb.StringProperty(default='', verbose_name='Consumer Key')
twitter_consumer_secret = ndb.StringProperty(default='', verbose_name='Consumer Secret')
@property
def has_bitbucket(self):
return bool(self.bitbucket_key and self.bitbucket_secret)
@property
def has_facebook(self):
return bool(self.facebook_app_id and self.facebook_app_secret)
@property
def has_google(self):
return bool(self.google_client_id and self.google_client_secret)
@property
def has_github(self):
return bool(self.github_client_id and self.github_client_secret)
@property
def has_linkedin(self):
return bool(self.linkedin_api_key and self.linkedin_secret_key)
@property
def has_microsoft(self):
return bool(self.microsoft_client_id and self.microsoft_client_secret)
@property
def has_twitter(self):
return bool(self.twitter_consumer_key and self.twitter_consumer_secret)
FIELDS = {
'bitbucket_key': fields.String,
'bitbucket_secret': fields.String,
'facebook_app_id': fields.String,
'facebook_app_secret': fields.String,
'github_client_id': fields.String,
'github_client_secret': fields.String,
'google_client_id': fields.String,
'google_client_secret': fields.String,
'linkedin_api_key': fields.String,
'linkedin_secret_key': fields.String,
'microsoft_client_id': fields.String,
'microsoft_client_secret': fields.String,
'twitter_consumer_key': fields.String,
'twitter_consumer_secret': fields.String,
}
FIELDS.update(model.Base.FIELDS)
| mit | 6d8bf69f54b8edfd2ac05c0e484448e5 | 35.69863 | 90 | 0.72751 | 3.674897 | false | false | false | false |
eerimoq/cantools | cantools/tester.py | 2 | 13317 | # The tester module.
import time
from collections import UserDict
import queue
import can
from .errors import Error
class DecodedMessage(object):
"""A decoded message.
"""
def __init__(self, name, signals):
self.name = name
self.signals = signals
class Messages(dict):
def __missing__(self, key):
raise Error("invalid message name '{}'".format(key))
class Listener(can.Listener):
def __init__(self, database, messages, input_queue, on_message):
self._database = database
self._messages = messages
self._input_queue = input_queue
self._on_message = on_message
def on_message_received(self, msg):
if msg.is_error_frame or msg.is_remote_frame:
return
try:
database_message = self._database.get_message_by_frame_id(
msg.arbitration_id)
except KeyError:
return
if database_message.name not in self._messages:
return
message = self._messages[database_message.name]
if not message.enabled:
return
decoded = DecodedMessage(database_message.name,
database_message.decode(msg.data,
message.decode_choices,
message.scaling))
if self._on_message:
self._on_message(decoded)
self._input_queue.put(decoded)
class Message(UserDict, object):
def __init__(self,
database,
can_bus,
input_list,
input_queue,
decode_choices,
scaling,
padding):
super(Message, self).__init__()
self.database = database
self._can_bus = can_bus
self._input_queue = input_queue
self.decode_choices = decode_choices
self.scaling = scaling
self.padding = padding
self._input_list = input_list
self.enabled = True
self._can_message = None
self._periodic_task = None
self.update(self._prepare_initial_signal_values())
@property
def periodic(self):
return self.database.cycle_time is not None
def __getitem__(self, signal_name):
return self.data[signal_name]
def __setitem__(self, signal_name, value):
self.data[signal_name] = value
self._update_can_message()
def update(self, signals):
self.data.update(signals)
self._update_can_message()
def send(self, signals=None):
if signals is not None:
self.update(signals)
self._can_bus.send(self._can_message)
def expect(self, signals=None, timeout=None, discard_other_messages=True):
if signals is None:
signals = {}
decoded = self._expect_input_list(signals, discard_other_messages)
if decoded is None:
decoded = self._expect_input_queue(signals,
timeout,
discard_other_messages)
return decoded
def _expect_input_list(self, signals, discard_other_messages):
other_messages = []
while len(self._input_list) > 0:
message = self._input_list.pop(0)
decoded = self._filter_expected_message(message, signals)
if decoded is not None:
break
other_messages.append(message)
else:
decoded = None
if not discard_other_messages:
other_messages += self._input_list
del self._input_list[:]
self._input_list.extend(other_messages)
return decoded
def _expect_input_queue(self, signals, timeout, discard_other_messages):
if timeout is not None:
end_time = time.time() + timeout
remaining_time = timeout
else:
remaining_time = None
while True:
try:
message = self._input_queue.get(timeout=remaining_time)
except queue.Empty:
return
decoded = self._filter_expected_message(message, signals)
if decoded is not None:
return decoded
if not discard_other_messages:
self._input_list.append(message)
if timeout is not None:
remaining_time = end_time - time.time()
if remaining_time <= 0:
return
def _filter_expected_message(self, message, signals):
if message.name == self.database.name:
if all([message.signals[name] == signals[name] for name in signals]):
return message.signals
def send_periodic_start(self):
if not self.enabled:
return
self._periodic_task = self._can_bus.send_periodic(
self._can_message,
self.database.cycle_time / 1000.0)
def send_periodic_stop(self):
if self._periodic_task is not None:
self._periodic_task.stop()
self._periodic_task = None
def _update_can_message(self):
arbitration_id = self.database.frame_id
extended_id = self.database.is_extended_frame
data = self.database.encode(self.data,
self.scaling,
self.padding)
self._can_message = can.Message(arbitration_id=arbitration_id,
is_extended_id=extended_id,
data=data)
if self._periodic_task is not None:
self._periodic_task.modify_data(self._can_message)
def _prepare_initial_signal_values(self):
initial_sig_values = dict()
for signal in self.database.signals:
minimum = 0 if not signal.minimum else signal.minimum
maximum = 0 if not signal.maximum else signal.maximum
if signal.initial:
# use initial signal value (if set)
initial_sig_values[signal.name] = (signal.initial * signal.decimal.scale) + signal.decimal.offset
elif minimum <= 0 <= maximum:
# use 0 if in allowed range
initial_sig_values[signal.name] = 0
else:
# set at least some default value
initial_sig_values[signal.name] = minimum
return initial_sig_values
class Tester(object):
"""Test given node `dut_name` on given CAN bus `bus_name`.
`database` is a :class:`~cantools.database.can.Database` instance.
`can_bus` a CAN bus object, normally created using the python-can
package.
The `on_message` callback is called for every successfully decoded
received message. It is called with one argument, an
:class:`~cantools.tester.DecodedMessage` instance.
Here is an example of how to create a tester:
>>> import can
>>> import cantools
>>> can.rc['interface'] = 'socketcan'
>>> can.rc['channel'] = 'vcan0'
>>> can_bus = can.interface.Bus()
>>> database = cantools.database.load_file('tests/files/tester.kcd')
>>> tester = cantools.tester.Tester('PeriodicConsumer', database, can_bus, 'PeriodicBus')
"""
def __init__(self,
dut_name,
database,
can_bus,
bus_name=None,
on_message=None,
decode_choices=True,
scaling=True,
padding=False):
self._dut_name = dut_name
self._bus_name = bus_name
self._database = database
self._can_bus = can_bus
self._input_list = []
self._input_queue = queue.Queue()
self._messages = Messages()
self._is_running = False
# DUT name validation.
node_names = [node.name for node in database.nodes]
if not any([name == dut_name for name in node_names]):
raise Error(
"expected DUT name in {}, but got '{}'".format(node_names,
dut_name))
# BUS name validation.
bus_names = [bus.name for bus in database.buses]
if len(bus_names) == 0:
if bus_name is not None:
raise Error(
"expected bus name None as there are no buses defined in "
"the database, but got '{}'".format(bus_name))
elif not any([name == bus_name for name in bus_names]):
raise Error(
"expected bus name in {}, but got '{}'".format(bus_names,
bus_name))
for message in database.messages:
if message.bus_name == bus_name:
self._messages[message.name] = Message(message,
can_bus,
self._input_list,
self._input_queue,
decode_choices,
scaling,
padding)
listener = Listener(self._database,
self._messages,
self._input_queue,
on_message)
self._notifier = can.Notifier(can_bus, [listener])
def start(self):
"""Start the tester. Starts sending enabled periodic messages.
>>> tester.start()
"""
for message in self._messages.values():
if self._dut_name in message.database.senders:
continue
if not message.periodic:
continue
message.send_periodic_start()
self._is_running = True
def stop(self):
"""Stop the tester. Periodic messages will not be sent after this
call. Call :meth:`~cantools.tester.Tester.start()` to resume a
stopped tester.
>>> tester.stop()
"""
for message in self._messages.values():
message.send_periodic_stop()
self._is_running = False
@property
def messages(self):
"""Set and get signals in messages. Set signals takes effect
immediately for started enabled periodic messages. Call
:meth:`~cantools.tester.Tester.send()` for other messages.
>>> periodic_message = tester.messages['PeriodicMessage1']
>>> periodic_message
{'Signal1': 0, 'Signal2': 0}
>>> periodic_message['Signal1'] = 1
>>> periodic_message.update({'Signal1': 2, 'Signal2': 5})
>>> periodic_message
{'Signal1': 2, 'Signal2': 5}
"""
return self._messages
def enable(self, message_name):
"""Enable given message `message_name` and start sending it if its
periodic and the tester is running.
>>> tester.enable('PeriodicMessage1')
"""
message = self._messages[message_name]
message.enabled = True
if self._is_running and message.periodic:
message.send_periodic_start()
def disable(self, message_name):
"""Disable given message `message_name` and stop sending it if its
periodic, enabled and the tester is running.
>>> tester.disable('PeriodicMessage1')
"""
message = self._messages[message_name]
message.enabled = False
if self._is_running and message.periodic:
message.send_periodic_stop()
def send(self, message_name, signals=None):
"""Send given message `message_name` and optional signals `signals`.
>>> tester.send('Message1', {'Signal2': 10})
>>> tester.send('Message1')
"""
self._messages[message_name].send(signals)
def expect(self,
message_name,
signals=None,
timeout=None,
discard_other_messages=True):
"""Expect given message `message_name` and signal values `signals`
within `timeout` seconds.
Give `signals` as ``None`` to expect any signal values.
Give `timeout` as ``None`` to wait forever.
Messages are read from the input queue, and those not matching
given `message_name` and `signals` are discarded if
`discard_other_messages` is
``True``. :meth:`~cantools.tester.Tester.flush_input()` may be
called to discard all old messages in the input queue before
calling the expect function.
Returns the expected message, or ``None`` on timeout.
>>> tester.expect('Message2', {'Signal1': 13})
{'Signal1': 13, 'Signal2': 9}
"""
return self._messages[message_name].expect(signals,
timeout,
discard_other_messages)
def flush_input(self):
"""Flush, or discard, all messages in the input queue.
"""
del self._input_list[:]
while not self._input_queue.empty():
self._input_queue.get()
| mit | 4bb3bc5c4ee03c598eec4551893f9f37 | 30.48227 | 113 | 0.535706 | 4.617545 | false | true | false | false |
gae-init/gae-init | main/control/admin.py | 2 | 6189 | # coding: utf-8
import flask
import flask_wtf
import wtforms
import auth
import config
import model
import util
from main import app
###############################################################################
# Admin Stuff
###############################################################################
@app.route('/admin/')
@auth.admin_required
def admin():
localhost = None
if config.DEVELOPMENT and ':' in flask.request.host:
try:
parts = flask.request.host.split(':')
port = int(parts[1]) + 1
localhost = 'http://%s:%s/' % (parts[0], port)
except:
pass
return flask.render_template(
'admin/admin.html',
title='Admin',
html_class='admin',
localhost=localhost,
)
###############################################################################
# Config Stuff
###############################################################################
class ConfigUpdateForm(flask_wtf.FlaskForm):
analytics_id = wtforms.StringField(model.Config.analytics_id._verbose_name, filters=[util.strip_filter])
announcement_html = wtforms.TextAreaField(model.Config.announcement_html._verbose_name, filters=[util.strip_filter])
announcement_type = wtforms.SelectField(model.Config.announcement_type._verbose_name, choices=[(t, t.title()) for t in model.Config.announcement_type._choices])
anonymous_recaptcha = wtforms.BooleanField(model.Config.anonymous_recaptcha._verbose_name)
brand_name = wtforms.StringField(model.Config.brand_name._verbose_name, [wtforms.validators.required()], filters=[util.strip_filter])
check_unique_email = wtforms.BooleanField(model.Config.check_unique_email._verbose_name)
email_authentication = wtforms.BooleanField(model.Config.email_authentication._verbose_name)
feedback_email = wtforms.StringField(model.Config.feedback_email._verbose_name, [wtforms.validators.optional(), wtforms.validators.email()], filters=[util.email_filter])
flask_secret_key = wtforms.StringField(model.Config.flask_secret_key._verbose_name, [wtforms.validators.optional()], filters=[util.strip_filter])
notify_on_new_user = wtforms.BooleanField(model.Config.notify_on_new_user._verbose_name)
recaptcha_private_key = wtforms.StringField(model.Config.recaptcha_private_key._verbose_name, filters=[util.strip_filter])
recaptcha_public_key = wtforms.StringField(model.Config.recaptcha_public_key._verbose_name, filters=[util.strip_filter])
salt = wtforms.StringField(model.Config.salt._verbose_name, [wtforms.validators.optional()], filters=[util.strip_filter])
trusted_hosts = wtforms.StringField(model.Config.trusted_hosts._verbose_name, [wtforms.validators.optional()], description='Comma separated: 127.0.0.1, example.com, etc')
verify_email = wtforms.BooleanField(model.Config.verify_email._verbose_name)
@app.route('/admin/config/', methods=['GET', 'POST'])
@auth.admin_required
def admin_config():
config_db = model.Config.get_master_db()
form = ConfigUpdateForm(obj=config_db)
if form.validate_on_submit():
if form.trusted_hosts.data:
form.trusted_hosts.data = set(
[e.strip() for e in form.trusted_hosts.data.split(',')])
else:
form.trusted_hosts.data = []
form.populate_obj(config_db)
if not config_db.flask_secret_key:
config_db.flask_secret_key = util.uuid()
if not config_db.salt:
config_db.salt = util.uuid()
config_db.put()
reload(config)
app.config.update(CONFIG_DB=config_db)
return flask.redirect(flask.url_for('admin'))
form.trusted_hosts.data = ', '.join(config_db.trusted_hosts)
return flask.render_template(
'admin/admin_config.html',
title='App Config',
html_class='admin-config',
form=form,
api_url=flask.url_for('api.admin.config'),
)
###############################################################################
# Auth Stuff
###############################################################################
class AuthUpdateForm(flask_wtf.FlaskForm):
bitbucket_key = wtforms.StringField(model.Config.bitbucket_key._verbose_name, filters=[util.strip_filter])
bitbucket_secret = wtforms.StringField(model.Config.bitbucket_secret._verbose_name, filters=[util.strip_filter])
facebook_app_id = wtforms.StringField(model.Config.facebook_app_id._verbose_name, filters=[util.strip_filter])
facebook_app_secret = wtforms.StringField(model.Config.facebook_app_secret._verbose_name, filters=[util.strip_filter])
github_client_id = wtforms.StringField(model.Config.github_client_id._verbose_name, filters=[util.strip_filter])
github_client_secret = wtforms.StringField(model.Config.github_client_secret._verbose_name, filters=[util.strip_filter])
google_client_id = wtforms.StringField(model.Config.google_client_id._verbose_name, filters=[util.strip_filter])
google_client_secret = wtforms.StringField(model.Config.google_client_secret._verbose_name, filters=[util.strip_filter])
linkedin_api_key = wtforms.StringField(model.Config.linkedin_api_key._verbose_name, filters=[util.strip_filter])
linkedin_secret_key = wtforms.StringField(model.Config.linkedin_secret_key._verbose_name, filters=[util.strip_filter])
microsoft_client_id = wtforms.StringField(model.Config.microsoft_client_id._verbose_name, filters=[util.strip_filter])
microsoft_client_secret = wtforms.StringField(model.Config.microsoft_client_secret._verbose_name, filters=[util.strip_filter])
twitter_consumer_key = wtforms.StringField(model.Config.twitter_consumer_key._verbose_name, filters=[util.strip_filter])
twitter_consumer_secret = wtforms.StringField(model.Config.twitter_consumer_secret._verbose_name, filters=[util.strip_filter])
@app.route('/admin/auth/', methods=['GET', 'POST'])
@auth.admin_required
def admin_auth():
config_db = model.Config.get_master_db()
form = AuthUpdateForm(obj=config_db)
if form.validate_on_submit():
form.populate_obj(config_db)
config_db.put()
reload(config)
app.config.update(CONFIG_DB=config_db)
return flask.redirect(flask.url_for('admin'))
return flask.render_template(
'admin/admin_auth.html',
title='Auth Config',
html_class='admin-auth',
form=form,
api_url=flask.url_for('api.admin.config'),
)
| mit | fbef850c0ce41cf32e34fcc4fb82ad2d | 47.351563 | 172 | 0.682986 | 3.647024 | false | true | false | false |
gae-init/gae-init | main/control/user.py | 2 | 11875 | # coding: utf-8
import copy
from google.appengine.ext import ndb
from webargs import fields as wf
from webargs.flaskparser import parser
import flask
import flask_login
import flask_wtf
import wtforms
import auth
import cache
import config
import model
import task
import util
from main import app
###############################################################################
# User List
###############################################################################
@app.route('/admin/user/')
@auth.admin_required
def user_list():
  """Render the paginated admin user list, optionally filtered by email."""
  query_args = parser.parse({
    'email': wf.Str(missing=None),
    'permissions': wf.DelimitedList(wf.Str(), delimiter=',', missing=[]),
  })
  user_dbs, cursors = model.User.get_dbs(
    email=query_args['email'], prev_cursor=True,
  )
  # Merge the globally registered permissions with any extras carried in the
  # query string so custom values stay selectable in the filter UI.
  permission_set = set(UserUpdateForm._permission_choices) | set(query_args['permissions'])
  return flask.render_template(
    'user/user_list.html',
    html_class='user-list',
    title='User List',
    user_dbs=user_dbs,
    next_url=util.generate_next_url(cursors['next']),
    prev_url=util.generate_next_url(cursors['prev']),
    api_url=flask.url_for('api.admin.user.list'),
    permissions=sorted(permission_set),
  )
###############################################################################
# User Update
###############################################################################
class UserUpdateForm(flask_wtf.FlaskForm):
  """Admin form for creating a new user or editing an existing one.

  Field labels are taken from the corresponding ``model.User`` property
  verbose names so the form and the model stay in sync.
  """
  username = wtforms.StringField(
    model.User.username._verbose_name,
    [wtforms.validators.required(), wtforms.validators.length(min=2)],
    filters=[util.email_filter],
  )
  name = wtforms.StringField(
    model.User.name._verbose_name,
    [wtforms.validators.required()], filters=[util.strip_filter],
  )
  email = wtforms.StringField(
    model.User.email._verbose_name,
    [wtforms.validators.optional(), wtforms.validators.email()],
    filters=[util.email_filter],
  )
  admin = wtforms.BooleanField(model.User.admin._verbose_name)
  active = wtforms.BooleanField(model.User.active._verbose_name)
  verified = wtforms.BooleanField(model.User.verified._verbose_name)
  permissions = wtforms.SelectMultipleField(
    model.User.permissions._verbose_name,
    filters=[util.sort_filter],
  )
  # Class-level registry of every permission announced through the
  # auth.permission_registered signal (see _permission_registered_callback).
  _permission_choices = set()

  def __init__(self, *args, **kwds):
    """Build the form and populate the permissions choices.

    Choices are computed per instance because the registry can grow at
    runtime as new permissions are registered.
    """
    super(UserUpdateForm, self).__init__(*args, **kwds)
    self.permissions.choices = [
      (p, p) for p in sorted(UserUpdateForm._permission_choices)
    ]
@auth.permission_registered.connect
def _permission_registered_callback(sender, permission):
  """Record a newly registered permission so the admin form can offer it."""
  UserUpdateForm._permission_choices.add(permission)
@app.route('/admin/user/create/', methods=['GET', 'POST'])
@app.route('/admin/user/<int:user_id>/update/', methods=['GET', 'POST'])
@auth.admin_required
def user_update(user_id=0):
  """Create a new user (no ``user_id``) or update an existing one."""
  user_db = model.User.get_by_id(user_id) if user_id else model.User(name='', username='')
  if not user_db:
    flask.abort(404)

  form = UserUpdateForm(obj=user_db)
  # Keep permissions already granted to this user selectable, even if they
  # were never registered through the global signal.
  form.permissions.choices.extend((p, p) for p in user_db.permissions)
  form.permissions.choices = sorted(set(form.permissions.choices))

  if form.validate_on_submit():
    username = form.username.data
    if not util.is_valid_username(username):
      form.username.errors.append('This username is invalid.')
    elif not model.User.is_username_available(username, user_db.key):
      form.username.errors.append('This username is already taken.')
    else:
      form.populate_obj(user_db)
      # An admin can never demote or deactivate their own account.
      if auth.current_user_key() == user_db.key:
        user_db.admin = True
        user_db.active = True
      user_db.put()
      return flask.redirect(flask.url_for(
        'user_list', order='-modified', active=user_db.active,
      ))

  return flask.render_template(
    'user/user_update.html',
    title=user_db.name or 'New User',
    html_class='user-update',
    form=form,
    user_db=user_db,
    api_url=flask.url_for('api.admin.user', user_key=user_db.key.urlsafe()) if user_db.key else ''
  )
###############################################################################
# User Verify
###############################################################################
@app.route('/user/verify/<token>/')
@auth.login_required
def user_verify(token):
  """Mark the signed-in user's email as verified when *token* matches."""
  user_db = auth.current_user_db()
  if user_db.token == token:
    user_db.verified = True
    # Rotate the token so the verification link is single-use.
    user_db.token = util.uuid()
    user_db.put()
    flask.flash('Hooray! Your email is now verified.', category='success')
  else:
    flask.flash('That link is either invalid or expired.', category='danger')
  return flask.redirect(flask.url_for('profile'))
###############################################################################
# User Forgot
###############################################################################
class UserForgotForm(flask_wtf.FlaskForm):
  """Form asking for the email address to send a password-reset link to."""
  email = wtforms.StringField(
    'Email',
    [wtforms.validators.required(), wtforms.validators.email()],
    filters=[util.email_filter],
  )
  # reCAPTCHA guards the endpoint against automated abuse.
  recaptcha = flask_wtf.RecaptchaField()
@app.route('/user/forgot/', methods=['GET', 'POST'])
def user_forgot(token=None):
  """Handle a password-reset request for the submitted email address."""
  # NOTE(review): the *token* parameter appears unused by the visible routes.
  if not config.CONFIG_DB.has_email_authentication:
    flask.abort(418)

  form = auth.form_with_recaptcha(UserForgotForm(obj=auth.current_user_db()))
  if form.validate_on_submit():
    # Every lookup counts as an auth attempt, throttling enumeration.
    cache.bump_auth_attempt()
    email = form.email.data
    # limit=2 is enough to distinguish "none", "exactly one" and "conflict".
    user_dbs, _ = util.get_dbs(
      model.User.query(), email=email, active=True, limit=2,
    )
    matches = len(user_dbs)
    if matches == 0:
      form.email.errors.append('This email was not found')
    elif matches == 1:
      task.reset_password_notification(user_dbs[0])
      return flask.redirect(flask.url_for('welcome'))
    elif matches == 2:
      task.email_conflict_notification(email)
      form.email.errors.append(
        '''We are sorry but it looks like there is a conflict with your
        account. Our support team is already informed and we will get back to
        you as soon as possible.'''
      )

  if form.errors:
    cache.bump_auth_attempt()

  return flask.render_template(
    'user/user_forgot.html',
    title='Forgot Password?',
    html_class='user-forgot',
    form=form,
  )
###############################################################################
# User Reset
###############################################################################
class UserResetForm(flask_wtf.FlaskForm):
  """Form for choosing a new password after following a reset link."""
  new_password = wtforms.StringField(
    'New Password',
    [wtforms.validators.required(), wtforms.validators.length(min=6)],
  )
@app.route('/user/reset/<token>/', methods=['GET', 'POST'])
@app.route('/user/reset/')
def user_reset(token=None):
  """Let a user pick a new password via a tokenized reset link."""
  user_db = model.User.get_by('token', token)
  if not user_db:
    flask.flash('That link is either invalid or expired.', category='danger')
    return flask.redirect(flask.url_for('welcome'))

  # Force a clean session so the reset applies to the token's owner only.
  if auth.is_logged_in():
    flask_login.logout_user()
    return flask.redirect(flask.request.path)

  reset_form = UserResetForm()
  if reset_form.validate_on_submit():
    user_db.password_hash = util.password_hash(user_db, reset_form.new_password.data)
    # Invalidate the link; a completed reset also proves email ownership.
    user_db.token = util.uuid()
    user_db.verified = True
    user_db.put()
    flask.flash('Your password was changed successfully.', category='success')
    return auth.signin_user_db(user_db)

  return flask.render_template(
    'user/user_reset.html',
    title='Reset Password',
    html_class='user-reset',
    form=reset_form,
    user_db=user_db,
  )
###############################################################################
# User Activate
###############################################################################
class UserActivateForm(flask_wtf.FlaskForm):
  """Form for completing account activation with a name and password."""
  name = wtforms.StringField(
    model.User.name._verbose_name,
    [wtforms.validators.required()], filters=[util.strip_filter],
  )
  password = wtforms.StringField(
    'Password',
    [wtforms.validators.required(), wtforms.validators.length(min=6)],
  )
@app.route('/user/activate/<token>/', methods=['GET', 'POST'])
def user_activate(token):
  """Let an invited user set their name and password via an activation link."""
  # Force a clean session so the activation applies to the token's owner only.
  if auth.is_logged_in():
    flask_login.logout_user()
    return flask.redirect(flask.request.path)

  user_db = model.User.get_by('token', token)
  if not user_db:
    flask.flash('That link is either invalid or expired.', category='danger')
    return flask.redirect(flask.url_for('welcome'))

  activate_form = UserActivateForm(obj=user_db)
  if activate_form.validate_on_submit():
    activate_form.populate_obj(user_db)
    user_db.password_hash = util.password_hash(user_db, activate_form.password.data)
    # Invalidate the link; a completed activation also proves email ownership.
    user_db.token = util.uuid()
    user_db.verified = True
    user_db.put()
    return auth.signin_user_db(user_db)

  return flask.render_template(
    'user/user_activate.html',
    title='Activate Account',
    html_class='user-activate',
    user_db=user_db,
    form=activate_form,
  )
###############################################################################
# User Merge
###############################################################################
class UserMergeForm(flask_wtf.FlaskForm):
user_key = wtforms.StringField('User Key', [wtforms.validators.required()])
user_keys = wtforms.StringField('User Keys', [wtforms.validators.required()])
username = wtforms.StringField('Username', [wtforms.validators.optional()])
name = wtforms.StringField(
'Name (merged)',
[wtforms.validators.required()], filters=[util.strip_filter],
)
email = wtforms.StringField(
'Email (merged)',
[wtforms.validators.optional(), wtforms.validators.email()],
filters=[util.email_filter],
)
@app.route('/admin/user/merge/', methods=['GET', 'POST'])
@auth.admin_required
def user_merge():
args = parser.parse({
'user_key': wf.Str(missing=None),
'user_keys': wf.DelimitedList(wf.Str(), delimiter=',', required=True),
})
user_db_keys = [ndb.Key(urlsafe=k) for k in args['user_keys']]
user_dbs = ndb.get_multi(user_db_keys)
if len(user_dbs) < 2:
flask.abort(400)
user_dbs.sort(key=lambda user_db: user_db.created)
merged_user_db = user_dbs[0]
auth_ids = []
permissions = []
is_admin = False
is_active = False
for user_db in user_dbs:
auth_ids.extend(user_db.auth_ids)
permissions.extend(user_db.permissions)
is_admin = is_admin or user_db.admin
is_active = is_active or user_db.active
if user_db.key.urlsafe() == args['user_key']:
merged_user_db = user_db
auth_ids = sorted(list(set(auth_ids)))
permissions = sorted(list(set(permissions)))
merged_user_db.permissions = permissions
merged_user_db.admin = is_admin
merged_user_db.active = is_active
merged_user_db.verified = False
form_obj = copy.deepcopy(merged_user_db)
form_obj.user_key = merged_user_db.key.urlsafe()
form_obj.user_keys = ','.join(args['user_keys'])
form = UserMergeForm(obj=form_obj)
if form.validate_on_submit():
form.populate_obj(merged_user_db)
merged_user_db.auth_ids = auth_ids
merged_user_db.put()
deprecated_keys = [k for k in user_db_keys if k != merged_user_db.key]
merge_user_dbs(merged_user_db, deprecated_keys)
return flask.redirect(
flask.url_for('user_update', user_id=merged_user_db.key.id()),
)
return flask.render_template(
'user/user_merge.html',
title='Merge Users',
html_class='user-merge',
user_dbs=user_dbs,
merged_user_db=merged_user_db,
form=form,
auth_ids=auth_ids,
api_url=flask.url_for('api.admin.user.list'),
)
@ndb.transactional(xg=True)
def merge_user_dbs(user_db, deprecated_keys):
# TODO: Merge possible user data before handling deprecated users
deprecated_dbs = ndb.get_multi(deprecated_keys)
for deprecated_db in deprecated_dbs:
deprecated_db.auth_ids = []
deprecated_db.active = False
deprecated_db.verified = False
if not deprecated_db.username.startswith('_'):
deprecated_db.username = '_%s' % deprecated_db.username
ndb.put_multi(deprecated_dbs)
| mit | a38905b96a9acc2e5e877ac7be39465c | 31.269022 | 98 | 0.617937 | 3.629279 | false | false | false | false |
eerimoq/cantools | cantools/__init__.py | 2 | 2980 | import sys
import argparse
import importlib
import pathlib
import os
from . import tester
from . import j1939
from . import logreader
from .errors import Error
# Remove once less users are using the old package structure.
from . import database as db
from .version import __version__
__author__ = 'Erik Moqvist'
class _ErrorSubparser:
def __init__(self, subparser_name, error_message):
self.subparser_name = subparser_name
self.error_message = error_message
def add_subparser(self, subparser_list):
err_parser = \
subparser_list.add_parser(self.subparser_name,
description = self.error_message)
err_parser.add_argument("args", nargs="*")
err_parser.set_defaults(func=self._print_error)
def _print_error(self, args):
raise ImportError(self.error_message)
def _load_subparser(subparser_name, subparsers):
"""Load a subparser for a CLI command in a safe manner.
i.e., if the subparser cannot be loaded due to an import error or
similar, no exception is raised if another command was invoked on
the CLI."""
try:
result = importlib.import_module(f'.subparsers.{subparser_name}',
package='cantools')
result.add_subparser(subparsers)
except ImportError as e:
result = _ErrorSubparser(subparser_name,
f'Command "{subparser_name}" is unavailable: "{e}"')
result.add_subparser(subparsers)
def _main():
parser = argparse.ArgumentParser(
description='Various CAN utilities.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument('-d', '--debug', action='store_true')
parser.add_argument('--version',
action='version',
version=__version__,
help='Print version information and exit.')
# Workaround to make the subparser required in Python 3.
subparsers = parser.add_subparsers(title='subcommands',
dest='subcommand')
subparsers.required = True
# load all subparses which have a source file in the cantools
# module's 'subparsers' sub-directory
subparsers_dir = pathlib.Path(__file__).parent / 'subparsers'
for cur_file_name in os.listdir(subparsers_dir):
if cur_file_name.startswith('__'):
continue
if cur_file_name.endswith('.py'):
subparser_name = cur_file_name[:-3]
_load_subparser(subparser_name, subparsers)
elif (subparsers_dir / cur_file_name / "__init__.py").is_file():
subparser_name = cur_file_name
_load_subparser(subparser_name, subparsers)
args = parser.parse_args()
if args.debug:
args.func(args)
else:
try:
args.func(args)
except BaseException as e:
sys.exit('error: ' + str(e))
| mit | e0fad484f82fd46bf97f956ec37c9bf2 | 31.391304 | 85 | 0.612081 | 4.156206 | false | false | false | false |
eerimoq/cantools | tests/test_monitor.py | 2 | 55472 | import unittest
import curses
import traceback
try:
from unittest.mock import Mock
from unittest.mock import patch
from unittest.mock import call
except ImportError:
from mock import Mock
from mock import patch
from mock import call
import can
from cantools.subparsers.monitor import Monitor
class Args(object):
    """Stand-in for the parsed command line arguments of the monitor
    subcommand.

    Only ``database`` and ``single_line`` vary between tests; every
    other option is pinned to the defaults this suite relies on
    (socketcan bus on channel vcan0, classic CAN, no masking/pruning).
    """

    def __init__(self, database, single_line=False):
        fixed_options = dict(encoding=None,
                             frame_id_mask=None,
                             prune=False,
                             no_strict=False,
                             bit_rate=None,
                             fd=False,
                             bus_type='socketcan',
                             channel='vcan0')

        for option, value in fixed_options.items():
            setattr(self, option, value)

        self.database = database
        self.single_line = single_line
class StdScr(object):
    """Mock replacement for the curses standard screen.

    ``getmaxyx`` yields the entries of ``resolution`` one at a time and
    ``getkey`` yields the entries of ``user_input`` one at a time; the
    remaining window methods are plain Mocks that just record calls.
    """

    def __init__(self, user_input=None, resolution=None):
        self.getmaxyx = Mock(
            side_effect=[(30, 64)] if resolution is None else resolution)
        self.getkey = Mock(
            side_effect=['q'] if user_input is None else user_input)

        for method_name in ('nodelay',
                            'clear',
                            'addstr',
                            'refresh',
                            'keypad',
                            'move'):
            setattr(self, method_name, Mock())
class CanToolsMonitorTest(unittest.TestCase):
    # Show full (untruncated) diffs when assertEqual() fails on the long
    # expected call lists below.
    maxDiff = None
    # Return values used for the mocked curses.color_pair(); index 0 is
    # the default attribute, indexes 1-3 correspond to the color pairs
    # set up with curses.init_pair() in the monitor.
    color_pair_side_effect = [ "default", "green", "cyan", "cyan inverted" ]
def assert_called(self, mock, expected, verbose=False):
try:
self.assertEqual(mock.call_args_list, expected)
except AssertionError as e:
if verbose:
nl = ",\n "
print(f"Assertion failed:")
print(f"Expected: {nl.join(map(lambda x: str(x), expected))}")
print(f"Got: {nl.join(map(lambda x: str(x), mock.call_args_list))}")
print("Traceback:")
traceback.print_stack()
raise e
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_immediate_quit(self,
                            use_default_colors,
                            curs_set,
                            init_pair,
                            is_term_resized,
                            color_pair,
                            bus,
                            notifier):
        """Start the monitor and quit immediately ('q' is StdScr's
        default user input): curses is initialized, the CAN bus is
        opened, and only the header and footer rows are drawn.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.run(1)
        # Check mocks.
        self.assert_called(use_default_colors, [call()])
        self.assert_called(curs_set, [call(False)])
        self.assert_called(
            init_pair,
            [
                call(1, curses.COLOR_BLACK, curses.COLOR_GREEN),
                call(2, curses.COLOR_BLACK, curses.COLOR_CYAN),
                call(3, curses.COLOR_CYAN, curses.COLOR_BLACK)
            ])
        self.assert_called(color_pair, [call(1), call(2)])
        self.assert_called(bus, [call(bustype='socketcan', channel='vcan0')])
        self.assert_called(
            stdscr.addstr,
            [
                call(0,
                     0,
                     'Received: 0, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_can_fd(self,
                    use_default_colors,
                    curs_set,
                    init_pair,
                    is_term_resized,
                    color_pair,
                    bus,
                    notifier):
        """With args.fd set, the CAN bus must be opened with fd=True."""
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/motohawk.dbc')
        args.fd = True
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.run(1)
        # Check mocks.
        self.assert_called(bus, [call(bustype='socketcan', channel='vcan0', fd=True)])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_one_frame(self,
                               _use_default_colors,
                               _curs_set,
                               _init_pair,
                               is_term_resized,
                               color_pair,
                               _bus,
                               _notifier):
        """A single decodable frame is rendered with one signal per row
        (the default multi-line format).
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_one_frame_single_line(self,
                                           _use_default_colors,
                                           _curs_set,
                                           _init_pair,
                                           is_term_resized,
                                           color_pair,
                                           _bus,
                                           _notifier):
        """With single_line=True the whole decoded message is rendered
        on one row instead of one row per signal.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/motohawk.dbc',
                    single_line=True)
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2,
                     0,
                     " 0.000 ExampleMessage(Enable: Enabled, "
                     "AverageRadius: 3.2 m, Temperature: 250.55 degK)"),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_reject_muxed_data_invalid_mux_index(self,
                                                 _use_default_colors,
                                                 _curs_set,
                                                 _init_pair,
                                                 is_term_resized,
                                                 color_pair,
                                                 _bus,
                                                 _notifier):
        """A multiplexed frame whose multiplexer value (first byte 0x24)
        is not defined in the database is counted as discarded and shown
        as raw undecoded data.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/msxii_system_can.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=1025,
            data=b'\x24\x00\x98\x98\x0b\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 1, Discarded: 1, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 BATTERY_VT ( undecoded: 0x240098980b00 )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_muxed_data(self,
                                _use_default_colors,
                                _curs_set,
                                _init_pair,
                                is_term_resized,
                                color_pair,
                                _bus,
                                _notifier):
        """A multiplexed frame with a valid multiplexer value (index 0)
        is decoded and rendered in the multi-line format.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/msxii_system_can.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=1025,
            data=b'\x00\x00\x98\x98\x0b\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2, 0, ' 0.000 BATTERY_VT('),
                call(3, 0, " BATTERY_VT_INDEX: 0,"),
                call(4, 0, ' MODULE_VOLTAGE_00: 39064,'),
                call(5, 0, ' MODULE_TEMP_00: 11'),
                call(6, 0, ' )'),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_muxed_data_single_line(self,
                                            _use_default_colors,
                                            _curs_set,
                                            _init_pair,
                                            is_term_resized,
                                            color_pair,
                                            _bus,
                                            _notifier):
        """A valid multiplexed frame rendered on a single row when
        single_line=True.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/msxii_system_can.dbc',
                    single_line=True)
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=1025,
            data=b'\x00\x00\x98\x98\x0b\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2,
                     0,
                     " 0.000 BATTERY_VT(BATTERY_VT_INDEX: 0, "
                     "MODULE_VOLTAGE_00: 39064, MODULE_TEMP_00: 11)"),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_muxed_data_multiple_single_line(self,
                                                     _use_default_colors,
                                                     _curs_set,
                                                     _init_pair,
                                                     is_term_resized,
                                                     color_pair,
                                                     _bus,
                                                     _notifier):
        """Frames of the same message with different multiplexer values
        each get their own row; a later frame only overwrites the row of
        its own multiplexer combination.
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/multiplex_2.dbc',
                    single_line=True)
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=0xc00fefe,
            data=b'\x00\x00\x00\x00\x02\x00\x00\x00',
            timestamp=0.0))
        monitor.on_message_received(can.Message(
            arbitration_id=0xc00fefe,
            data=b'\x00\x00\x00\x00\x01\x00\x00\x00',
            timestamp=1.0))
        monitor.on_message_received(can.Message(
            arbitration_id=0xc00fefe,
            data=b'\x01\x00\x00\x00\x01\x00\x00\x00',
            timestamp=2.0))
        monitor.on_message_received(can.Message(
            arbitration_id=0xc00fefe,
            data=b'\x20\x00\x00\x00\x01\x00\x00\x00',
            timestamp=3.0))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 4, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2,
                     0,
                     " 1.000 Extended(S0: 0, S1: 0, S2: 0, S3: 0, S6: 1, S7: 0)"),
                call(3,
                     0,
                     " 0.000 Extended(S0: 0, S1: 0, S2: 0, S3: 0, S6: 2, S8: 0)"),
                call(4,
                     0,
                     " 3.000 Extended(S0: 0, S1: 2, S4: 0, S6: 1, S7: 0)"),
                call(5,
                     0,
                     " 2.000 Extended(S0: 1, S5: 0, S6: 1, S7: 0)"),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_display_one_frame_input_twice(self,
                                           _use_default_colors,
                                           _curs_set,
                                           _init_pair,
                                           is_term_resized,
                                           color_pair,
                                           _bus,
                                           _notifier):
        """Receiving the same message twice overwrites its row; the
        displayed timestamp (1.100) is relative to the first frame
        (2.1 - 1.0).
        """
        # Prepare mocks.
        stdscr = StdScr()
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00',
            timestamp=1.0))
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xd0\x00\x00\x00\x00\x00',
            timestamp=2.1))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                call(0, 0, 'Received: 2, Discarded: 0, Errors: 0'),
                call(1,
                     0,
                     ' TIMESTAMP MESSAGE ',
                     'green'),
                call(2, 0, ' 1.100 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.54 degK'),
                call(6, 0, ' )'),
                call(29,
                     0,
                     'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
                     'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_filter(self,
                    _use_default_colors,
                    _curs_set,
                    _init_pair,
                    is_term_resized,
                    color_pair,
                    _bus,
                    _notifier):
        """Exercise the filter prompt: open it with 'f', type a
        non-matching regex ('Y'), an invalid regex ('Y['), backspace,
        commit with enter, reopen, and finally enter a matching regex
        ('E'). Each keystroke triggers a redraw that is checked below.
        """
        # Prepare mocks. The user input drives the whole scenario.
        stdscr = StdScr(user_input=[
            'f', 'Y', '[', '\b', '\n', 'f', '\b', 'E', '\n', 'q'
        ])
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00'))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                # No filter.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
                # 'f' pressed.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: ', 'cyan'),
                call(29, 14, ' ', 'cyan inverted'),
                call(29, 15, ' ', 'cyan'),
                # No match on 'Y'.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: Y'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(29, 0, 'Filter regex: Y', 'cyan'),
                call(29, 15, ' ', 'cyan inverted'),
                call(29, 16, ' ', 'cyan'),
                # Invalid filter 'Y['.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: Y['),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: Y[', 'cyan'),
                call(29, 16, ' ', 'cyan inverted'),
                call(29, 17, ' ', 'cyan'),
                # No match on 'Y'.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: Y'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(29, 0, 'Filter regex: Y', 'cyan'),
                call(29, 15, ' ', 'cyan inverted'),
                call(29, 16, ' ', 'cyan'),
                # Hit enter to hide filter prompt.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: Y'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
                # 'f' pressed again.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: Y'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(29, 0, 'Filter regex: Y', 'cyan'),
                call(29, 15, ' ', 'cyan inverted'),
                call(29, 16, ' ', 'cyan'),
                # Backspace.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: ', 'cyan'),
                call(29, 14, ' ', 'cyan inverted'),
                call(29, 15, ' ', 'cyan'),
                # Match on 'E'.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: E'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: E', 'cyan'),
                call(29, 15, ' ', 'cyan inverted'),
                call(29, 16, ' ', 'cyan'),
                # Hit enter to hide filter.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0, Filter: E'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan')
            ])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_reset(self,
                   _use_default_colors,
                   _curs_set,
                   _init_pair,
                   is_term_resized,
                   color_pair,
                   _bus,
                   _notifier):
        """'r' clears the filter, counters and displayed rows; frames
        received while paused ('p') are not displayed, and frames
        received after the reset start from fresh counters/timestamps.
        """
        # Prepare mocks. Key sequence: set filter 'E', pause, reset,
        # reopen the (now empty) filter prompt, quit.
        stdscr = StdScr(user_input=[
            'f', 'E', '\n', 'p', ' ', 'r', 'f', '\n', 'q'
        ])
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = False
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00',
            timestamp=3))
        # Discarded.
        monitor.on_message_received(can.Message(
            arbitration_id=497,
            data=b'\xc0\x06\xb0\x00\x00\x00\x00\x00',
            timestamp=6))
        monitor.tick(1)
        monitor.tick(1)
        monitor.tick(1)
        # Input another before pause.
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xc0\x00\x00\x00\x00\x00',
            timestamp=7))
        monitor.tick(1)
        # Input when paused. Will not be displayed.
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xd0\x00\x00\x00\x00\x00',
            timestamp=10))
        monitor.tick(1)
        monitor.tick(1)
        monitor.tick(1)
        # Input after reset.
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\x00\x00\x00\x00\x00\x00',
            timestamp=11))
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                # One ok and one with bad frame id.
                call(0, 0, 'Received: 2, Discarded: 1, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
                # 'f' pressed.
                call(0, 0, 'Received: 2, Discarded: 1, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: ', 'cyan'),
                call(29, 14, ' ', 'cyan inverted'),
                call(29, 15, ' ', 'cyan'),
                # 'E' pressed.
                call(0, 0, 'Received: 2, Discarded: 1, Errors: 0, Filter: E'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: E', 'cyan'),
                call(29, 15, ' ', 'cyan inverted'),
                call(29, 16, ' ', 'cyan'),
                # '\n' pressed.
                call(0, 0, 'Received: 3, Discarded: 1, Errors: 0, Filter: E'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 4.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.54 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
                # 'p' pressed. Input frame not displayed.
                # 'r' pressed.
                call(0, 0, 'Received: 0, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
                # Input after reset. 'f' pressed.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.48 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'Filter regex: ', 'cyan'),
                call(29, 14, ' ', 'cyan inverted'),
                call(29, 15, ' ', 'cyan'),
                # '\n' pressed.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.48 degK'),
                call(6, 0, ' )'),
                call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan')
                # 'q' pressed, no redraw.
            ])
@patch('can.Notifier')
@patch('can.Bus')
@patch('curses.color_pair')
@patch('curses.is_term_resized')
@patch('curses.init_pair')
@patch('curses.curs_set')
@patch('curses.use_default_colors')
def test_play_pause(self,
_use_default_colors,
_curs_set,
_init_pair,
is_term_resized,
color_pair,
_bus,
_notifier):
# Prepare mocks.
stdscr = StdScr(user_input=[
' ', ' ', 'p', ' ', ' ', 'p', ' ', ' ', ' ', 'q'
])
args = Args('tests/files/dbc/motohawk.dbc')
color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
is_term_resized.return_value = False
# Run monitor.
monitor = Monitor(stdscr, args)
for timestamp in range(4):
monitor.on_message_received(can.Message(
arbitration_id=496,
data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00',
timestamp=timestamp))
monitor.tick(1)
# Display most recently received at unpause.
monitor.tick(1)
monitor.tick(1)
monitor.tick(1)
for timestamp in range(5, 7):
monitor.on_message_received(can.Message(
arbitration_id=496,
data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00',
timestamp=timestamp))
monitor.tick(1)
monitor.run(1)
# Check mocks.
self.assert_called(
stdscr.addstr,
[
# Received when playing.
call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 0.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29
,
0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan'),
call(0, 0, 'Received: 2, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 1.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29
,
0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan'),
call(0, 0, 'Received: 3, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 2.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29
,
0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan'),
# Received when paused, displayed at unpause.
call(0, 0, 'Received: 4, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 3.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29
,
0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan'),
# Received when playing.
call(0, 0, 'Received: 5, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 5.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29
,
0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan'),
call(0, 0, 'Received: 6, Discarded: 0, Errors: 0'),
call(1,
0,
' TIMESTAMP MESSAGE ',
'green'),
call(2, 0, ' 6.000 ExampleMessage('),
call(3, 0, " Enable: Enabled,"),
call(4, 0, ' AverageRadius: 3.2 m,'),
call(5, 0, ' Temperature: 250.55 degK'),
call(6, 0, ' )'),
call(29,
0,
'q: Quit, f: Filter, p: Play/Pause, r: Reset ',
'cyan')
])
    @patch('can.Notifier')
    @patch('can.Bus')
    @patch('curses.color_pair')
    @patch('curses.is_term_resized')
    @patch('curses.init_pair')
    @patch('curses.curs_set')
    @patch('curses.use_default_colors')
    def test_resize(self,
                    _use_default_colors,
                    _curs_set,
                    _init_pair,
                    is_term_resized,
                    color_pair,
                    _bus,
                    _notifier):
        """With is_term_resized always True, each redraw re-reads the
        terminal size from getmaxyx() and the footer moves to the new
        bottom row.
        """
        # Prepare mocks. getmaxyx() yields a shrinking terminal.
        stdscr = StdScr(user_input=[' ', ' ', 'q'],
                        resolution=[(30, 40), (25, 35), (25, 35), (20, 30)])
        args = Args('tests/files/dbc/motohawk.dbc')
        color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
        is_term_resized.return_value = True
        # Run monitor.
        monitor = Monitor(stdscr, args)
        monitor.on_message_received(can.Message(
            arbitration_id=496,
            data=b'\xc0\x06\xe0\x00\x00\x00\x00\x00',
            timestamp=1))
        monitor.tick(1)
        monitor.run(1)
        # Check mocks.
        self.assert_called(
            stdscr.addstr,
            [
                # 25 x 35.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(24, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset', 'cyan'),
                # 25 x 35.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(24, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset', 'cyan'),
                # 20 x 30.
                call(0, 0, 'Received: 1, Discarded: 0, Errors: 0'),
                call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
                call(2, 0, ' 0.000 ExampleMessage('),
                call(3, 0, " Enable: Enabled,"),
                call(4, 0, ' AverageRadius: 3.2 m,'),
                call(5, 0, ' Temperature: 250.55 degK'),
                call(6, 0, ' )'),
                call(19, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset', 'cyan')
            ])
@patch('can.Notifier')
@patch('can.Bus')
@patch('curses.color_pair')
@patch('curses.is_term_resized')
@patch('curses.init_pair')
@patch('curses.curs_set')
@patch('curses.use_default_colors')
def test_display_paginated_data(self,
_use_default_colors,
_curs_set,
_init_pair,
is_term_resized,
color_pair,
_bus,
_notifier):
# Prepare mocks.
stdscr = StdScr(user_input=[
' ', 'KEY_NPAGE', 'KEY_NPAGE', 'KEY_NPAGE', 'KEY_PPAGE', 'q'
])
args = Args('tests/files/dbc/msxii_system_can.dbc')
color_pair.side_effect = lambda i: self.color_pair_side_effect[i]
is_term_resized.return_value = False
# Run monitor.
monitor = Monitor(stdscr, args)
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x00\x00\x98\x98\x0b\x00',
timestamp=0))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x01\x00\x98\x98\x0b\x00',
timestamp=1))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x02\x00\x98\x98\x0b\x00',
timestamp=2))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x03\x00\x98\x98\x0b\x00',
timestamp=3))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x04\x00\x98\x98\x0b\x00',
timestamp=4))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x05\x00\x98\x98\x0b\x00',
timestamp=5))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x06\x00\x98\x98\x0b\x00',
timestamp=6))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x07\x00\x98\x98\x0b\x00',
timestamp=7))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x08\x00\x98\x98\x0b\x00',
timestamp=8))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x09\x00\x98\x98\x0b\x00',
timestamp=9))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0a\x00\x98\x98\x0b\x00',
timestamp=10))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0b\x00\x98\x98\x0b\x00',
timestamp=11))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0c\x00\x98\x98\x0b\x00',
timestamp=12))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0d\x00\x98\x98\x0b\x00',
timestamp=13))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0e\x00\x98\x98\x0b\x00',
timestamp=14))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x0f\x00\x98\x98\x0b\x00',
timestamp=15))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x10\x00\x98\x98\x0b\x00',
timestamp=16))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x11\x00\x98\x98\x0b\x00',
timestamp=17))
monitor.on_message_received(can.Message(
arbitration_id=1025,
data=b'\x12\x00\x98\x98\x0b\x00',
timestamp=18))
monitor.tick(1)
monitor.run(1)
self.maxDiff = None
unittest.util._MAX_LENGTH=20000
# Check mocks.
self.assert_called(
stdscr.addstr,
[
# Start on page 1
call(0, 0, 'Received: 19, Discarded: 0, Errors: 0'),
call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
call(2, 0, ' 0.000 BATTERY_VT('),
call(3, 0, ' BATTERY_VT_INDEX: 0,'),
call(4, 0, ' MODULE_VOLTAGE_00: 39064,'),
call(5, 0, ' MODULE_TEMP_00: 11'),
call(6, 0, ' )'),
call(7, 0, ' 1.000 BATTERY_VT('),
call(8, 0, ' BATTERY_VT_INDEX: 1,'),
call(9, 0, ' MODULE_VOLTAGE_01: 39064,'),
call(10, 0, ' MODULE_TEMP_01: 11'),
call(11, 0, ' )'),
call(12, 0, ' 10.000 BATTERY_VT('),
call(13, 0, ' BATTERY_VT_INDEX: 10,'),
call(14, 0, ' MODULE_VOLTAGE_10: 39064,'),
call(15, 0, ' MODULE_TEMP_10: 11'),
call(16, 0, ' )'),
call(17, 0, ' 11.000 BATTERY_VT('),
call(18, 0, ' BATTERY_VT_INDEX: 11,'),
call(19, 0, ' MODULE_VOLTAGE_11: 39064,'),
call(20, 0, ' MODULE_TEMP_11: 11'),
call(21, 0, ' )'),
call(22, 0, ' 12.000 BATTERY_VT('),
call(23, 0, ' BATTERY_VT_INDEX: 12,'),
call(24, 0, ' MODULE_VOLTAGE_12: 39064,'),
call(25, 0, ' MODULE_TEMP_12: 11'),
call(26, 0, ' )'),
call(27, 0, ' 13.000 BATTERY_VT('),
call(28, 0, ' BATTERY_VT_INDEX: 13,'),
call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
# Move to page 2
call(0, 0, 'Received: 19, Discarded: 0, Errors: 0'),
call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
call(2, 0, ' MODULE_VOLTAGE_13: 39064,'),
call(3, 0, ' MODULE_TEMP_13: 11'),
call(4, 0, ' )'),
call(5, 0, ' 14.000 BATTERY_VT('),
call(6, 0, ' BATTERY_VT_INDEX: 14,'),
call(7, 0, ' MODULE_VOLTAGE_14: 39064,'),
call(8, 0, ' MODULE_TEMP_14: 11'),
call(9, 0, ' )'),
call(10, 0, ' 15.000 BATTERY_VT('),
call(11, 0, ' BATTERY_VT_INDEX: 15,'),
call(12, 0, ' MODULE_VOLTAGE_15: 39064,'),
call(13, 0, ' MODULE_TEMP_15: 11'),
call(14, 0, ' )'),
call(15, 0, ' 16.000 BATTERY_VT('),
call(16, 0, ' BATTERY_VT_INDEX: 16,'),
call(17, 0, ' MODULE_VOLTAGE_16: 39064,'),
call(18, 0, ' MODULE_TEMP_16: 11'),
call(19, 0, ' )'),
call(20, 0, ' 17.000 BATTERY_VT('),
call(21, 0, ' BATTERY_VT_INDEX: 17,'),
call(22, 0, ' MODULE_VOLTAGE_17: 39064,'),
call(23, 0, ' MODULE_TEMP_17: 11'),
call(24, 0, ' )'),
call(25, 0, ' 18.000 BATTERY_VT('),
call(26, 0, ' BATTERY_VT_INDEX: 18,'),
call(27, 0, ' MODULE_VOLTAGE_18: 39064,'),
call(28, 0, ' MODULE_TEMP_18: 11'),
call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
# Move to page 3
call(0, 0, 'Received: 19, Discarded: 0, Errors: 0'),
call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
call(2, 0, ' )'),
call(3, 0, ' 2.000 BATTERY_VT('),
call(4, 0, ' BATTERY_VT_INDEX: 2,'),
call(5, 0, ' MODULE_VOLTAGE_02: 39064,'),
call(6, 0, ' MODULE_TEMP_02: 11'),
call(7, 0, ' )'),
call(8, 0, ' 3.000 BATTERY_VT('),
call(9, 0, ' BATTERY_VT_INDEX: 3,'),
call(10, 0, ' MODULE_VOLTAGE_03: 39064,'),
call(11, 0, ' MODULE_TEMP_03: 11'),
call(12, 0, ' )'),
call(13, 0, ' 4.000 BATTERY_VT('),
call(14, 0, ' BATTERY_VT_INDEX: 4,'),
call(15, 0, ' MODULE_VOLTAGE_04: 39064,'),
call(16, 0, ' MODULE_TEMP_04: 11'),
call(17, 0, ' )'),
call(18, 0, ' 5.000 BATTERY_VT('),
call(19, 0, ' BATTERY_VT_INDEX: 5,'),
call(20, 0, ' MODULE_VOLTAGE_05: 39064,'),
call(21, 0, ' MODULE_TEMP_05: 11'),
call(22, 0, ' )'),
call(23, 0, ' 6.000 BATTERY_VT('),
call(24, 0, ' BATTERY_VT_INDEX: 6,'),
call(25, 0, ' MODULE_VOLTAGE_06: 39064,'),
call(26, 0, ' MODULE_TEMP_06: 11'),
call(27, 0, ' )'),
call(28, 0, ' 7.000 BATTERY_VT('),
call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
# Move to page 4
call(0, 0, 'Received: 19, Discarded: 0, Errors: 0'),
call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
call(2, 0, ' MODULE_TEMP_04: 11'),
call(3, 0, ' )'),
call(4, 0, ' 5.000 BATTERY_VT('),
call(5, 0, ' BATTERY_VT_INDEX: 5,'),
call(6, 0, ' MODULE_VOLTAGE_05: 39064,'),
call(7, 0, ' MODULE_TEMP_05: 11'),
call(8, 0, ' )'),
call(9, 0, ' 6.000 BATTERY_VT('),
call(10, 0, ' BATTERY_VT_INDEX: 6,'),
call(11, 0, ' MODULE_VOLTAGE_06: 39064,'),
call(12, 0, ' MODULE_TEMP_06: 11'),
call(13, 0, ' )'),
call(14, 0, ' 7.000 BATTERY_VT('),
call(15, 0, ' BATTERY_VT_INDEX: 7,'),
call(16, 0, ' MODULE_VOLTAGE_07: 39064,'),
call(17, 0, ' MODULE_TEMP_07: 11'),
call(18, 0, ' )'),
call(19, 0, ' 8.000 BATTERY_VT('),
call(20, 0, ' BATTERY_VT_INDEX: 8,'),
call(21, 0, ' MODULE_VOLTAGE_08: 39064,'),
call(22, 0, ' MODULE_TEMP_08: 11'),
call(23, 0, ' )'),
call(24, 0, ' 9.000 BATTERY_VT('),
call(25, 0, ' BATTERY_VT_INDEX: 9,'),
call(26, 0, ' MODULE_VOLTAGE_09: 39064,'),
call(27, 0, ' MODULE_TEMP_09: 11'),
call(28, 0, ' )'),
call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
# Move back to page 3
call(0, 0, 'Received: 19, Discarded: 0, Errors: 0'),
call(1, 0, ' TIMESTAMP MESSAGE ', 'green'),
call(2, 0, ' BATTERY_VT_INDEX: 16,'),
call(3, 0, ' MODULE_VOLTAGE_16: 39064,'),
call(4, 0, ' MODULE_TEMP_16: 11'),
call(5, 0, ' )'),
call(6, 0, ' 17.000 BATTERY_VT('),
call(7, 0, ' BATTERY_VT_INDEX: 17,'),
call(8, 0, ' MODULE_VOLTAGE_17: 39064,'),
call(9, 0, ' MODULE_TEMP_17: 11'),
call(10, 0, ' )'),
call(11, 0, ' 18.000 BATTERY_VT('),
call(12, 0, ' BATTERY_VT_INDEX: 18,'),
call(13, 0, ' MODULE_VOLTAGE_18: 39064,'),
call(14, 0, ' MODULE_TEMP_18: 11'),
call(15, 0, ' )'),
call(16, 0, ' 2.000 BATTERY_VT('),
call(17, 0, ' BATTERY_VT_INDEX: 2,'),
call(18, 0, ' MODULE_VOLTAGE_02: 39064,'),
call(19, 0, ' MODULE_TEMP_02: 11'),
call(20, 0, ' )'),
call(21, 0, ' 3.000 BATTERY_VT('),
call(22, 0, ' BATTERY_VT_INDEX: 3,'),
call(23, 0, ' MODULE_VOLTAGE_03: 39064,'),
call(24, 0, ' MODULE_TEMP_03: 11'),
call(25, 0, ' )'),
call(26, 0, ' 4.000 BATTERY_VT('),
call(27, 0, ' BATTERY_VT_INDEX: 4,'),
call(28, 0, ' MODULE_VOLTAGE_04: 39064,'),
call(29, 0, 'q: Quit, f: Filter, p: Play/Pause, r: Reset ', 'cyan'),
])
# Allow running this test module directly (``python test_monitor.py``)
# in addition to discovery via a test runner.
if __name__ == '__main__':
    unittest.main()
| mit | 939b86bcebfeb0b81905900de58f52f6 | 42.644375 | 104 | 0.382842 | 3.859996 | false | false | false | false |
liampauling/betfair | examples/examplestreaminghistorical.py | 2 | 2645 | import logging
import betfairlightweight
from betfairlightweight import StreamListener
"""
Data needs to be downloaded from:
https://historicdata.betfair.com
"""
# setup logging
logging.basicConfig(level=logging.INFO)
# create trading instance (don't need username/password)
trading = betfairlightweight.APIClient("username", "password")
# create listener
listener = StreamListener(max_latency=None)
# create historical stream (update file_path to your file location)
stream = trading.streaming.create_historical_generator_stream(
file_path="/tmp/BASIC-1.132153978",
listener=listener,
)
# create generator
gen = stream.get_generator()
# print marketBooks
for market_books in gen():
for market_book in market_books:
print(market_book)
# print based on seconds to start
for market_books in gen():
for market_book in market_books:
seconds_to_start = (
market_book.market_definition.market_time - market_book.publish_time
).total_seconds()
if seconds_to_start < 100:
print(market_book.market_id, seconds_to_start, market_book.total_matched)
# print winner details once market is closed
if market_book.status == "CLOSED":
for runner in market_book.runners:
if runner.status == "WINNER":
print(
"{0}: {1} with sp of {2}".format(
runner.status, runner.selection_id, runner.sp.actual_sp
)
)
# record prices to a file
with open("output.txt", "w") as output:
output.write("Time,MarketId,Status,Inplay,SelectionId,LastPriceTraded\n")
for market_books in gen():
for market_book in market_books:
with open("output.txt", "a") as output:
for runner in market_book.runners:
# how to get runner details from the market definition
market_def = market_book.market_definition
runners_dict = {
(runner.selection_id, runner.handicap): runner
for runner in market_def.runners
}
runner_def = runners_dict.get((runner.selection_id, runner.handicap))
output.write(
"%s,%s,%s,%s,%s,%s\n"
% (
market_book.publish_time,
market_book.market_id,
market_book.status,
market_book.inplay,
runner.selection_id,
runner.last_price_traded or "",
)
)
| mit | 3c611b2f0dbefd4da3357d106580b778 | 32.481013 | 85 | 0.58034 | 4.11353 | false | false | false | false |
liampauling/betfair | examples/exampleone.py | 2 | 2709 | import betfairlightweight
from betfairlightweight import filters
# create trading instance
trading = betfairlightweight.APIClient("username", "password", app_key="appKey")
# login
trading.login()
# make event type request to find horse racing event type
horse_racing_event_type_id = trading.betting.list_event_types(
filter=filters.market_filter(text_query="Horse Racing")
)
# returns one result
print(horse_racing_event_type_id)
for event_type in horse_racing_event_type_id:
# prints id, name and market count
print(event_type.event_type.id, event_type.event_type.name, event_type.market_count)
horse_racing_id = event_type.event_type.id
# list all horse racing market catalogues
market_catalogues = trading.betting.list_market_catalogue(
filter=filters.market_filter(
event_type_ids=[horse_racing_id], # filter on just horse racing
market_countries=["GB"], # filter on just GB countries
market_type_codes=["WIN"], # filter on just WIN market types
),
market_projection=[
"MARKET_START_TIME",
"RUNNER_DESCRIPTION",
], # runner description required
max_results=1,
)
print("%s market catalogues returned" % len(market_catalogues))
for market_catalogue in market_catalogues:
# prints market id, market name and market start time
print(
market_catalogue.market_id,
market_catalogue.market_name,
market_catalogue.market_start_time,
)
for runner in market_catalogue.runners:
# prints runner id, runner name and handicap
print(runner.selection_id, runner.runner_name, runner.handicap)
# market book request
market_books = trading.betting.list_market_book(
market_ids=[market_catalogue.market_id],
price_projection=filters.price_projection(
price_data=filters.price_data(ex_all_offers=True)
),
)
for market_book in market_books:
# prints market id, inplay?, status and total matched
print(
market_book.market_id,
market_book.inplay,
market_book.status,
market_book.total_matched,
)
for runner in market_book.runners:
# prints selection id, status and total matched
print(runner.selection_id, runner.status, runner.total_matched)
available_to_back = runner.ex.available_to_back
available_to_lay = runner.ex.available_to_lay
print(available_to_back, available_to_lay)
# logout
trading.logout()
| mit | f1b9a26eb29f0279af00f45e7d11fb8a | 33.291139 | 88 | 0.632337 | 3.91474 | false | false | false | false |
liampauling/betfair | betfairlightweight/endpoints/scores.py | 2 | 4681 | import requests
from typing import Union, List
from .baseendpoint import BaseEndpoint
from .. import resources
from ..utils import clean_locals
class Scores(BaseEndpoint):
"""
Scores operations.
"""
URI = "ScoresAPING/v1.0/"
def list_race_details(
self,
meeting_ids: list = None,
race_ids: list = None,
session: requests.Session = None,
lightweight: bool = None,
) -> Union[list, List[resources.RaceDetails]]:
"""
Search for races to get their details.
:param list meeting_ids: Optionally restricts the results to the specified meeting IDs.
The unique Id for the meeting equivalent to the eventId for that specific race as
returned by listEvents
:param list race_ids: Optionally restricts the results to the specified race IDs. The
unique Id for the race in the format meetingid.raceTime (hhmm). raceTime is in GMT
:param requests.session session: Requests session object
:param bool lightweight: If True will return dict not a resource
:rtype: list[resources.RaceDetails]
"""
params = clean_locals(locals())
method = "%s%s" % (self.URI, "listRaceDetails")
(response, response_json, elapsed_time) = self.request(method, params, session)
return self.process_response(
response_json, resources.RaceDetails, elapsed_time, lightweight
)
# Following requires app key to be authorised
def list_available_events(
self,
event_ids: list = None,
event_type_ids: list = None,
event_status: list = None,
session: requests.Session = None,
lightweight: bool = None,
) -> Union[list, List[resources.AvailableEvent]]:
"""
Search for events that have live score data available.
:param list event_ids: Optionally restricts the results to the specified event IDs
:param list event_type_ids: Optionally restricts the results to the specified event type IDs
:param list event_status: Optionally restricts the results to the specified event status
:param requests.session session: Requests session object
:param bool lightweight: If True will return dict not a resource
:rtype: list[resources.AvailableEvent]
"""
params = clean_locals(locals())
method = "%s%s" % (self.URI, "listAvailableEvents")
(response, response_json, elapsed_time) = self.request(method, params, session)
return self.process_response(
response_json, resources.AvailableEvent, elapsed_time, lightweight
)
def list_scores(
self,
update_keys: list,
session: requests.Session = None,
lightweight: bool = None,
) -> Union[list, List[resources.Score]]:
"""
Returns a list of current scores for the given events.
:param list update_keys: The filter to select desired markets. All markets that match
the criteria in the filter are selected e.g. [{'eventId': '28205674', 'lastUpdateSequenceProcessed': 2}]
:param requests.session session: Requests session object
:param bool lightweight: If True will return dict not a resource
:rtype: list[resources.Score]
"""
params = clean_locals(locals())
method = "%s%s" % (self.URI, "listScores")
(response, response_json, elapsed_time) = self.request(method, params, session)
return self.process_response(
response_json, resources.Score, elapsed_time, lightweight
)
def list_incidents(
self,
update_keys: dict,
session: requests.Session = None,
lightweight: bool = None,
) -> Union[list, List[resources.Incidents]]:
"""
Returns a list of incidents for the given events.
:param dict update_keys: The filter to select desired markets. All markets that match
the criteria in the filter are selected e.g. [{'eventId': '28205674', 'lastUpdateSequenceProcessed': 2}]
:param requests.session session: Requests session object
:param bool lightweight: If True will return dict not a resource
:rtype: list[resources.Incidents]
"""
params = clean_locals(locals())
method = "%s%s" % (self.URI, "listIncidents")
(response, response_json, elapsed_time) = self.request(method, params, session)
return self.process_response(
response_json, resources.Incidents, elapsed_time, lightweight
)
@property
def url(self) -> str:
return "%s%s" % (self.client.api_uri, "scores/json-rpc/v1")
| mit | 21473a0c467e53ccb1d6b5524c6f6cc0 | 38.336134 | 112 | 0.644948 | 4.232369 | false | false | false | false |
liampauling/betfair | betfairlightweight/resources/baseresource.py | 2 | 1242 | import functools
import datetime
from typing import Union, Optional
from ..compat import basestring, integer_types, json, parse_datetime
class BaseResource:
"""Lightweight data structure for resources."""
def __init__(self, **kwargs):
self.elapsed_time = kwargs.pop("elapsed_time", None)
now = datetime.datetime.utcnow()
self._datetime_created = now
self._datetime_updated = now
self._data = kwargs
def json(self) -> str:
return json.dumps(self._data)
@staticmethod
@functools.lru_cache()
def strip_datetime(value: Union[str, int]) -> Optional[datetime.datetime]:
"""
Converts value to datetime if string or int.
"""
if isinstance(value, basestring):
try:
return parse_datetime(value)
except ValueError:
return
elif isinstance(value, integer_types):
try:
return datetime.datetime.utcfromtimestamp(value / 1e3)
except (ValueError, OverflowError, OSError):
return
def __repr__(self) -> str:
return "<%s>" % self.__class__.__name__
def __str__(self) -> str:
return self.__class__.__name__
| mit | 6c95d19fe2b5d0cfb9688923c8d4d5bf | 28.571429 | 78 | 0.585346 | 4.566176 | false | false | false | false |
liampauling/betfair | tests/test_bettingresources.py | 2 | 25166 | import datetime
import unittest
from betfairlightweight import resources
from betfairlightweight.resources.bettingresources import (
LimitOrder,
LimitOnCloseOrder,
MarketOnCloseOrder,
PriceSize,
)
from betfairlightweight.compat import BETFAIR_DATE_FORMAT
from tests.tools import create_mock_json
class BettingResourcesTest(unittest.TestCase):
ELAPSED_TIME = 1.2
def test_event_type_result(self):
mock_response = create_mock_json("tests/resources/list_event_types.json")
event_types = mock_response.json().get("result")
for event_type in event_types:
resource = resources.EventTypeResult(
elapsed_time=self.ELAPSED_TIME, **event_type
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == event_type["marketCount"]
assert resource.event_type.id == event_type["eventType"]["id"]
assert resource.event_type.name == event_type["eventType"]["name"]
def test_competition_result(self):
mock_response = create_mock_json("tests/resources/list_competitions.json")
competitions = mock_response.json().get("result")
for competition in competitions:
resource = resources.CompetitionResult(
elapsed_time=self.ELAPSED_TIME, **competition
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == competition["marketCount"]
assert resource.competition_region == competition["competitionRegion"]
assert resource.competition.id == competition["competition"]["id"]
assert resource.competition.name == competition["competition"]["name"]
def test_time_range_result(self):
mock_response = create_mock_json("tests/resources/list_time_ranges.json")
time_ranges = mock_response.json().get("result")
for time_range in time_ranges:
resource = resources.TimeRangeResult(
elapsed_time=self.ELAPSED_TIME, **time_range
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == time_range["marketCount"]
assert resource.time_range._from == datetime.datetime.strptime(
time_range["timeRange"]["from"], BETFAIR_DATE_FORMAT
)
assert resource.time_range.to == datetime.datetime.strptime(
time_range["timeRange"]["to"], BETFAIR_DATE_FORMAT
)
def test_event_result(self):
mock_response = create_mock_json("tests/resources/list_events.json")
event_results = mock_response.json().get("result")
for event_result in event_results:
resource = resources.EventResult(
elapsed_time=self.ELAPSED_TIME, **event_result
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == event_result["marketCount"]
assert resource.event.id == event_result["event"]["id"]
assert resource.event.open_date == datetime.datetime.strptime(
event_result["event"]["openDate"], BETFAIR_DATE_FORMAT
)
assert resource.event.time_zone == event_result["event"]["timezone"]
assert resource.event.country_code == event_result["event"]["countryCode"]
assert resource.event.name == event_result["event"]["name"]
assert resource.event.venue == event_result["event"]["venue"]
def test_market_type_result(self):
mock_response = create_mock_json("tests/resources/list_market_types.json")
market_type_results = mock_response.json().get("result")
for market_type_result in market_type_results:
resource = resources.MarketTypeResult(
elapsed_time=self.ELAPSED_TIME, **market_type_result
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == market_type_result["marketCount"]
assert resource.market_type == market_type_result["marketType"]
def test_country_result(self):
mock_response = create_mock_json("tests/resources/list_countries.json")
countries_results = mock_response.json().get("result")
for countries_result in countries_results:
resource = resources.CountryResult(
elapsed_time=self.ELAPSED_TIME, **countries_result
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == countries_result["marketCount"]
assert resource.country_code == countries_result["countryCode"]
def test_venue_result(self):
mock_response = create_mock_json("tests/resources/list_venues.json")
venue_results = mock_response.json().get("result")
for venue_result in venue_results:
resource = resources.VenueResult(
elapsed_time=self.ELAPSED_TIME, **venue_result
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_count == venue_result["marketCount"]
assert resource.venue == venue_result["venue"]
def test_market_catalogue(self):
mock_response = create_mock_json("tests/resources/list_market_catalogue.json")
market_catalogues = mock_response.json().get("result")
for market_catalogue in market_catalogues:
resource = resources.MarketCatalogue(
elapsed_time=self.ELAPSED_TIME, **market_catalogue
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_id == market_catalogue["marketId"]
assert resource.market_name == market_catalogue["marketName"]
assert resource.total_matched == market_catalogue["totalMatched"]
assert resource.market_start_time == datetime.datetime.strptime(
market_catalogue["marketStartTime"], BETFAIR_DATE_FORMAT
)
assert resource.competition.id == market_catalogue["competition"]["id"]
assert resource.competition.name == market_catalogue["competition"]["name"]
assert resource.event.id == market_catalogue["event"]["id"]
assert resource.event.open_date == datetime.datetime.strptime(
market_catalogue["event"]["openDate"], BETFAIR_DATE_FORMAT
)
assert resource.event.time_zone == market_catalogue["event"]["timezone"]
assert (
resource.event.country_code == market_catalogue["event"]["countryCode"]
)
assert resource.event.name == market_catalogue["event"]["name"]
assert resource.event.venue == market_catalogue["event"].get("venue")
assert resource.event_type.id == market_catalogue["eventType"]["id"]
assert resource.event_type.name == market_catalogue["eventType"]["name"]
assert (
resource.description.betting_type
== market_catalogue["description"]["bettingType"]
)
assert (
resource.description.bsp_market
== market_catalogue["description"]["bspMarket"]
)
assert (
resource.description.discount_allowed
== market_catalogue["description"]["discountAllowed"]
)
assert (
resource.description.market_base_rate
== market_catalogue["description"]["marketBaseRate"]
)
assert resource.description.market_time == datetime.datetime.strptime(
market_catalogue["description"]["marketTime"], BETFAIR_DATE_FORMAT
)
assert (
resource.description.market_type
== market_catalogue["description"]["marketType"]
)
assert (
resource.description.persistence_enabled
== market_catalogue["description"]["persistenceEnabled"]
)
assert (
resource.description.regulator
== market_catalogue["description"]["regulator"]
)
assert (
resource.description.rules == market_catalogue["description"]["rules"]
)
assert (
resource.description.rules_has_date
== market_catalogue["description"]["rulesHasDate"]
)
assert resource.description.suspend_time == datetime.datetime.strptime(
market_catalogue["description"]["suspendTime"], BETFAIR_DATE_FORMAT
)
assert (
resource.description.turn_in_play_enabled
== market_catalogue["description"]["turnInPlayEnabled"]
)
assert (
resource.description.wallet == market_catalogue["description"]["wallet"]
)
assert resource.description.each_way_divisor == market_catalogue[
"description"
].get("eachWayDivisor")
assert resource.description.clarifications == market_catalogue[
"description"
].get("clarifications")
assert resource.description.line_range_info.interval == market_catalogue[
"description"
]["lineRangeInfo"].get("interval")
assert resource.description.line_range_info.market_unit == market_catalogue[
"description"
]["lineRangeInfo"].get("marketUnit")
assert (
resource.description.line_range_info.min_unit_value
== market_catalogue["description"]["lineRangeInfo"].get("minUnitValue")
)
assert (
resource.description.line_range_info.max_unit_value
== market_catalogue["description"]["lineRangeInfo"].get("maxUnitValue")
)
assert (
resource.description.price_ladder_description.type
== market_catalogue["description"]["priceLadderDescription"].get("type")
)
assert len(resource.runners) == 10
assert resource.runners[6].handicap == 0.0
assert resource.runners[6].runner_name == "SCR Altach"
assert resource.runners[6].selection_id == 872710
assert resource.runners[6].sort_priority == 7
assert resource.runners[6].metadata == {"runnerId": "872710"}
def test_market_catalogue_no_ero_data(self):
mock_response = create_mock_json(
"tests/resources/list_market_catalogue_no_ero.json"
)
market_catalogues = mock_response.json().get("result")
for market_catalogue in market_catalogues:
resources.MarketCatalogue(
elapsed_time=self.ELAPSED_TIME, **market_catalogue
)
def test_market_book(self):
mock_response = create_mock_json("tests/resources/list_market_book.json")
market_books = mock_response.json().get("result")
for market_book in market_books:
resource = resources.MarketBook(
elapsed_time=self.ELAPSED_TIME, **market_book
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert resource.market_id == market_book["marketId"]
assert resource.bet_delay == market_book["betDelay"]
assert resource.bsp_reconciled == market_book["bspReconciled"]
assert resource.complete == market_book["complete"]
assert resource.cross_matching == market_book["crossMatching"]
assert resource.inplay == market_book["inplay"]
assert resource.is_market_data_delayed == market_book["isMarketDataDelayed"]
assert resource.last_match_time == datetime.datetime.strptime(
market_book["lastMatchTime"], BETFAIR_DATE_FORMAT
)
assert (
resource.number_of_active_runners
== market_book["numberOfActiveRunners"]
)
assert resource.number_of_runners == market_book["numberOfRunners"]
assert resource.number_of_winners == market_book["numberOfWinners"]
assert resource.runners_voidable == market_book["runnersVoidable"]
assert resource.status == market_book["status"]
assert resource.total_available == market_book["totalAvailable"]
assert resource.total_matched == market_book["totalMatched"]
assert resource.version == market_book["version"]
assert len(resource.runners) == len(market_book["runners"])
for i, key_line in enumerate(
market_book["keyLineDescription"].get("keyLine", [])
):
assert (
key_line["handicap"]
== resource.key_line_description.key_line[i].handicap
)
assert (
key_line["selectionId"]
== resource.key_line_description.key_line[i].selection_id
)
for i, runner in enumerate(market_book["runners"]):
resource_runner = resource.runners[i]
assert resource_runner.selection_id == runner["selectionId"]
assert resource_runner.status == runner["status"]
assert resource_runner.total_matched == runner.get("totalMatched")
assert resource_runner.adjustment_factor == runner.get(
"adjustmentFactor"
)
assert resource_runner.handicap == runner["handicap"]
assert resource_runner.last_price_traded == runner.get(
"lastPriceTraded"
)
if runner.get("removalDate"):
assert resource_runner.removal_date == datetime.datetime.strptime(
runner["removalDate"], BETFAIR_DATE_FORMAT
)
# else:
# assert resource_runner.sp.near_price == runner['sp']['nearPrice']
# assert resource_runner.sp.far_price == runner['sp']['farPrice']
# assert resource_runner.sp.actual_sp == runner['sp']['actualSP']
# assert resource_runner.sp.back_stake_taken == runner['sp']['backStakeTaken']
# assert resource_runner.sp.lay_liability_taken == runner['sp']['layLiabilityTaken']
#
# assert resource_runner.ex.available_to_back == runner['ex'].get('availableToBack')
# assert resource_runner.ex.available_to_lay == runner['ex'].get('availableToLay')
# assert resource_runner.ex.traded_volume == runner['ex'].get('tradedVolume')
# # print(resource_runner.orders)
# # print(resource_runner.matches)
# # todo complete
def test_price_size(self):
price_size = PriceSize(**{"price": 1.01, "size": 2048})
self.assertEqual(price_size.price, 1.01)
self.assertEqual(price_size.size, 2048)
self.assertEqual(str(price_size), "Price: 1.01 Size: 2048")
def test_match(self):
match = {
"selectionId": 123,
"matchedLays": [{"price": 1.01, "size": 2.00}],
"matchedBacks": [],
}
resource = resources.Match(**match)
self.assertEqual(resource.selection_id, 123)
self.assertEqual(resource.matched_backs, [])
self.assertEqual(resource.matched_lays[0].price, 1.01)
self.assertEqual(resource.matched_lays[0].size, 2.00)
def test_current_orders(self):
mock_response = create_mock_json("tests/resources/list_current_orders.json")
current_orders = mock_response.json().get("result")
resource = resources.CurrentOrders(
elapsed_time=self.ELAPSED_TIME, **current_orders
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert len(resource.orders) == len(current_orders.get("currentOrders"))
for current_order in current_orders.get("currentOrders"):
assert resource.orders[0].bet_id == current_order["betId"]
def test_current_orders_description(self):
mock_response = create_mock_json(
"tests/resources/list_current_orders_description.json"
)
current_orders = mock_response.json().get("result")
resource = resources.CurrentOrders(
elapsed_time=self.ELAPSED_TIME, **current_orders
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert len(resource.orders) == len(current_orders.get("currentOrders"))
for current_order in current_orders.get("currentOrders"):
assert resource.orders[0].bet_id == current_order["betId"]
def test_cleared_orders(self):
mock_response = create_mock_json("tests/resources/list_cleared_orders.json")
cleared_orders = mock_response.json().get("result")
resource = resources.ClearedOrders(
elapsed_time=self.ELAPSED_TIME, **cleared_orders
)
assert resource.elapsed_time == self.ELAPSED_TIME
assert len(resource.orders) == len(cleared_orders.get("clearedOrders"))
for cleared_order in cleared_orders.get("clearedOrders"):
assert resource.orders[0].bet_id == cleared_order["betId"]
# todo complete
def test_market_profit_loss(self):
    """One MarketProfitLoss resource is built and verified per market."""
    mock_response = create_mock_json(
        "tests/resources/list_market_profit_and_loss.json"
    )
    for raw in mock_response.json().get("result"):
        resource = resources.MarketProfitLoss(
            elapsed_time=self.ELAPSED_TIME, **raw
        )
        assert resource.elapsed_time == self.ELAPSED_TIME
        assert resource.market_id == raw["marketId"]
        assert resource.commission_applied == raw.get("commissionApplied")
        assert len(resource.profit_and_losses) == len(raw["profitAndLosses"])
# todo complete
def test_limit_order(self):
    """LimitOrder camelCase kwargs should surface as snake_case attributes."""
    order = LimitOrder(
        1.01,
        12,
        persistenceType="LIMIT",
        timeInForce=True,
        minFillSize=2,
        betTargetType="BACKERS_PROFIT",
        betTargetSize=3,
    )
    expected = {
        "price": 1.01,
        "size": 12,
        "persistence_type": "LIMIT",
        "min_fill_size": 2,
        "bet_target_type": "BACKERS_PROFIT",
        "bet_target_size": 3,
    }
    for attr, value in expected.items():
        assert getattr(order, attr) == value
    # Identity check, matching the original assertion's strictness.
    assert order.time_in_force is True
def test_limit_on_close_order(self):
    """Both constructor kwargs are stored verbatim."""
    order = LimitOnCloseOrder(liability=12, price=100)
    assert order.liability == 12
    assert order.price == 100
def test_market_on_close_order(self):
    """The liability kwarg is stored verbatim."""
    order = MarketOnCloseOrder(liability=12)
    assert order.liability == 12
def test_place_orders(self):
    """PlaceOrders should parse the top-level fields and every report."""
    mock_response = create_mock_json("tests/resources/place_orders.json")
    place_orders = mock_response.json().get("result")
    resource = resources.PlaceOrders(elapsed_time=self.ELAPSED_TIME, **place_orders)
    assert resource.elapsed_time == self.ELAPSED_TIME
    assert resource.market_id == place_orders["marketId"]
    assert resource.status == place_orders["status"]
    assert resource.customer_ref == place_orders.get("customerRef")
    assert resource.error_code == place_orders.get("errorCode")
    assert len(resource.place_instruction_reports) == len(
        place_orders.get("instructionReports")
    )
    # Compare each report with its matching raw instruction (the original
    # compared every raw order against report [0] only).
    for i, order in enumerate(place_orders.get("instructionReports")):
        report = resource.place_instruction_reports[i]
        assert report.size_matched == order["sizeMatched"]
        assert report.status == order["status"]
        assert report.bet_id == order["betId"]
        assert report.average_price_matched == order["averagePriceMatched"]
        assert report.placed_date == datetime.datetime.strptime(
            order["placedDate"], BETFAIR_DATE_FORMAT
        )
        assert report.error_code == order.get("errorCode")
        instruction = order["instruction"]
        assert report.instruction.selection_id == instruction["selectionId"]
        assert report.instruction.side == instruction["side"]
        assert report.instruction.order_type == instruction["orderType"]
        assert report.instruction.handicap == instruction["handicap"]
        limit = instruction["limitOrder"]
        assert report.instruction.limit_order.persistence_type == (
            limit["persistenceType"]
        )
        assert report.instruction.limit_order.price == limit["price"]
        assert report.instruction.limit_order.size == limit["size"]
def test_cancel_orders(self):
    """CancelOrders should parse the top-level fields and every report."""
    mock_response = create_mock_json("tests/resources/cancel_orders.json")
    cancel_orders = mock_response.json().get("result")
    resource = resources.CancelOrders(
        elapsed_time=self.ELAPSED_TIME, **cancel_orders
    )
    assert resource.elapsed_time == self.ELAPSED_TIME
    assert resource.market_id == cancel_orders["marketId"]
    assert resource.status == cancel_orders["status"]
    assert resource.customer_ref == cancel_orders.get("customerRef")
    assert resource.error_code == cancel_orders.get("errorCode")
    assert len(resource.cancel_instruction_reports) == len(
        cancel_orders.get("instructionReports")
    )
    # Compare each report with its matching raw instruction (the original
    # compared every raw order against report [0] only).
    for i, order in enumerate(cancel_orders.get("instructionReports")):
        report = resource.cancel_instruction_reports[i]
        assert report.size_cancelled == order["sizeCancelled"]
        assert report.status == order["status"]
        assert report.cancelled_date == datetime.datetime.strptime(
            order["cancelledDate"], BETFAIR_DATE_FORMAT
        )
        assert report.instruction.bet_id == order["instruction"]["betId"]
        assert report.instruction.size_reduction == order["instruction"].get(
            "sizeReduction"
        )
def test_update_orders(self):
    """UpdateOrders should parse the top-level response fields."""
    mock_response = create_mock_json("tests/resources/update_orders.json")
    update_orders = mock_response.json().get("result")
    resource = resources.UpdateOrders(
        elapsed_time=self.ELAPSED_TIME, **update_orders
    )
    assert resource.elapsed_time == self.ELAPSED_TIME
    assert resource.market_id == update_orders["marketId"]
    assert resource.status == update_orders["status"]
    assert resource.customer_ref == update_orders.get("customerRef")
    assert resource.error_code == update_orders.get("errorCode")
    assert len(resource.update_instruction_reports) == len(
        update_orders.get("instructionReports")
    )
    # TODO: per-report assertions not written yet; the loop is kept so the
    # raw reports are at least iterable.
    for _ in update_orders.get("instructionReports"):
        pass
def test_replace_orders(self):
    """ReplaceOrders should parse the top-level response fields."""
    mock_response = create_mock_json("tests/resources/replace_orders.json")
    replace_orders = mock_response.json().get("result")
    resource = resources.ReplaceOrders(
        elapsed_time=self.ELAPSED_TIME, **replace_orders
    )
    assert resource.elapsed_time == self.ELAPSED_TIME
    assert resource.market_id == replace_orders["marketId"]
    assert resource.status == replace_orders["status"]
    assert resource.customer_ref == replace_orders.get("customerRef")
    assert resource.error_code == replace_orders.get("errorCode")
    # TODO: compare the instruction reports against
    # replace_orders["instructionReports"] once implemented.
| mit | 6567e8b1dcee420a4bb4d6f969cb5415 | 44.019678 | 100 | 0.597473 | 4.388143 | false | true | false | false |
liampauling/betfair | betfairlightweight/resources/streamingresources.py | 1 | 14959 | from .baseresource import BaseResource
from .bettingresources import PriceLadderDescription
class MarketDefinitionRunner:
    """Runner (selection) entry inside a streamed market definition.

    :type adjustment_factor: float
    :type id: int
    :type removal_date: datetime.datetime
    :type sort_priority: int
    :type status: unicode
    """

    def __init__(
        self,
        id: int,
        sortPriority: int,
        status: str,
        hc: float = 0,
        bsp: float = None,
        adjustmentFactor: float = None,
        removalDate: str = None,
        name: str = None,
    ):
        # Map raw stream fields onto snake_case attributes.
        self.selection_id = id
        self.sort_priority = sortPriority
        self.status = status
        self.handicap = hc
        self.bsp = bsp
        self.adjustment_factor = adjustmentFactor
        # Removal date arrives as a string timestamp; parse to datetime.
        self.removal_date = BaseResource.strip_datetime(removalDate)
        self.name = name  # only populated when parsing historic data

    def __str__(self):
        return "MarketDefinitionRunner: {}".format(self.selection_id)

    def __repr__(self):
        return "<MarketDefinitionRunner>"
class MarketDefinitionKeyLineSelection:
    """Single selection (id + handicap) within a key line definition.

    :type selectionId: int
    :type handicap: float
    """

    def __init__(self, **kwargs):
        # Payload uses short keys: "id" -> selection, "hc" -> handicap.
        self.selection_id, self.handicap = kwargs.get("id"), kwargs.get("hc")
class MarketDefinitionKeyLine:
    """Ordered collection of key-line selections for a market."""

    def __init__(self, kl):
        # "kl" is a list of raw selection dicts from the stream payload.
        self.key_line = [
            MarketDefinitionKeyLineSelection(**entry) for entry in kl
        ]
class MarketDefinition:
    """Full market definition received on the market stream.

    :type bet_delay: int
    :type betting_type: unicode
    :type bsp_market: bool
    :type bsp_reconciled: bool
    :type complete: bool
    :type country_code: unicode
    :type cross_matching: bool
    :type discount_allowed: bool
    :type event_id: unicode
    :type event_type_id: unicode
    :type in_play: bool
    :type market_base_rate: float
    :type market_time: datetime.datetime
    :type market_type: unicode
    :type number_of_active_runners: int
    :type number_of_winners: int
    :type open_date: datetime.datetime
    :type persistence_enabled: bool
    :type regulators: unicode
    :type runners: list[MarketDefinitionRunner]
    :type runners_voidable: bool
    :type settled_time: datetime.datetime
    :type status: unicode
    :type suspend_time: datetime.datetime
    :type timezone: unicode
    :type turn_in_play_enabled: bool
    :type venue: unicode
    :type version: int
    """

    def __init__(
        self,
        betDelay: int,
        bettingType: str,
        bspMarket: bool,
        bspReconciled: bool,
        complete: bool,
        crossMatching: bool,
        discountAllowed: bool,
        eventId: str,
        eventTypeId: str,
        inPlay: bool,
        marketBaseRate: float,
        marketTime: str,
        numberOfActiveRunners: int,
        numberOfWinners: int,
        persistenceEnabled: bool,
        regulators: str,
        runnersVoidable: bool,
        status: str,
        timezone: str,
        turnInPlayEnabled: bool,
        version: int,
        runners: list,
        openDate: str = None,
        countryCode: str = None,
        eachWayDivisor: float = None,
        venue: str = None,
        settledTime: str = None,
        suspendTime: str = None,
        marketType: str = None,
        lineMaxUnit: float = None,
        lineMinUnit: float = None,
        lineInterval: float = None,
        name: str = None,
        eventName: str = None,
        priceLadderDefinition: dict = None,
        keyLineDefinition: dict = None,
        raceType: str = None,
    ):
        self.bet_delay = betDelay
        self.betting_type = bettingType
        self.bsp_market = bspMarket
        self.bsp_reconciled = bspReconciled
        self.complete = complete
        self.country_code = countryCode
        self.cross_matching = crossMatching
        self.discount_allowed = discountAllowed
        self.event_id = eventId
        self.event_type_id = eventTypeId
        self.in_play = inPlay
        self.market_base_rate = marketBaseRate
        # Timestamps arrive as strings and are parsed to datetimes.
        self.market_time = BaseResource.strip_datetime(marketTime)
        self.market_type = marketType
        self.number_of_active_runners = numberOfActiveRunners
        self.number_of_winners = numberOfWinners
        self.open_date = BaseResource.strip_datetime(openDate) if openDate else None
        self.persistence_enabled = persistenceEnabled
        self.regulators = regulators
        self.runners_voidable = runnersVoidable
        self.settled_time = BaseResource.strip_datetime(settledTime)
        self.status = status
        self.each_way_divisor = eachWayDivisor
        self.suspend_time = BaseResource.strip_datetime(suspendTime)
        self.timezone = timezone
        self.turn_in_play_enabled = turnInPlayEnabled
        self.venue = venue
        self.version = version
        # LINE-market metadata (None for other betting types).
        self.line_max_unit = lineMaxUnit
        self.line_min_unit = lineMinUnit
        self.line_interval = lineInterval
        # Nested payload structures are wrapped in their resource classes.
        self.runners = [MarketDefinitionRunner(**i) for i in runners]
        self.price_ladder_definition = (
            PriceLadderDescription(**priceLadderDefinition)
            if priceLadderDefinition
            else None
        )
        self.key_line_definitions = (
            MarketDefinitionKeyLine(**keyLineDefinition) if keyLineDefinition else None
        )
        self.race_type = raceType
        self.name = name  # historic data only
        self.event_name = eventName  # historic data only
class Race(BaseResource):
    """Race payload received on the race stream.

    :type market_id: unicode
    :type race_id: unicode
    :type rpm: dict
    :type rcm: dict
    """

    def __init__(self, **kwargs):
        # Streaming bookkeeping fields are popped so BaseResource never
        # sees them as payload data.
        self.streaming_unique_id = kwargs.pop("streaming_unique_id", None)
        self.streaming_update = kwargs.pop("streaming_update", None)
        self.streaming_snap = kwargs.pop("streaming_snap", False)
        pt = kwargs.get("pt")
        self.publish_time_epoch = pt
        self.publish_time = self.strip_datetime(pt)
        super().__init__(**kwargs)
        self.market_id = kwargs.get("mid")
        self.race_id = kwargs.get("id")
        rpc = kwargs.get("rpc")
        self.race_progress = RaceProgress(**rpc) if rpc else None
        self.race_runners = [RaceChange(**rc) for rc in kwargs.get("rrc") or []]
class RaceProgress:
    """Race-level progress snapshot from the race stream.

    :type publish_time: int
    :type feed_time: int
    :type race_id: unicode
    :type gate: unicode
    :type sectional_time: float
    :type running_time: float
    :type speed: float
    :type progress: float
    :type order: list
    """

    def __init__(self, **kwargs):
        ft = kwargs.get("ft")
        self.feed_time_epoch = ft
        self.feed_time = BaseResource.strip_datetime(ft)
        # Short payload key -> readable attribute name.
        for attr, key in (
            ("gate_name", "g"),
            ("sectional_time", "st"),
            ("running_time", "rt"),
            ("speed", "spd"),
            ("progress", "prg"),
            ("order", "ord"),
            ("jumps", "J"),
        ):
            setattr(self, attr, kwargs.get(key))
class RaceChange:
    """Per-runner change message from the race stream.

    :type publish_time: int
    :type feed_time: int
    :type race_id: unicode
    :type selection_id: int
    :type lat: float
    :type long: float
    :type speed: float
    :type progress: float
    :type stride_frequency: float
    """

    def __init__(self, **kwargs):
        ft = kwargs.get("ft")
        self.feed_time_epoch = ft
        self.feed_time = BaseResource.strip_datetime(ft)
        # Short payload key -> readable attribute name.
        for attr, key in (
            ("selection_id", "id"),
            ("lat", "lat"),
            ("long", "long"),
            ("speed", "spd"),
            ("progress", "prg"),
            ("stride_frequency", "sfq"),  # stride frequency in Hz
        ):
            setattr(self, attr, kwargs.get(key))
class CricketFixtureInfo:
    """Fixture metadata for a cricket match.

    :type home_team: unicode
    :type away_team: unicode
    :type expected_start_time: unicode
    :type fixture_status: unicode
    :type event_description: unicode
    :type max_overs: int
    :type event_status: unicode
    """

    def __init__(self, **kwargs):
        # camelCase payload key -> snake_case attribute (missing -> None).
        for attr, key in (
            ("home_team", "homeTeam"),
            ("away_team", "awayTeam"),
            ("expected_start_time", "expectedStartTime"),
            ("fixture_status", "fixtureStatus"),
            ("event_description", "eventDescription"),
            ("max_overs", "maxOvers"),
            ("event_status", "eventStatus"),
        ):
            setattr(self, attr, kwargs.get(key))
class CricketTeamInfo:
    """Team entry within a cricket match payload.

    :type name: unicode
    :type players: list[CricketPlayerInfo]
    :type selection_id: int
    """

    def __init__(self, **kwargs):
        self.name = kwargs.get("name")
        self.selection_id = kwargs.get("selectionId")
        raw_players = kwargs.get("players") or []
        self.players = [CricketPlayerInfo(**p) for p in raw_players]
class CricketPlayerInfo:
    """Player entry within a cricket team payload.

    :type name: unicode
    :type is_captain: bool
    :type is_wicket_keeper: bool
    """

    def __init__(self, **kwargs):
        # camelCase payload key -> snake_case attribute (missing -> None).
        for attr, key in (
            ("name", "name"),
            ("is_captain", "isCaptain"),
            ("is_wicket_keeper", "isWicketKeeper"),
        ):
            setattr(self, attr, kwargs.get(key))
class CricketMatchStats:
    """Aggregated match statistics for a cricket match.

    :type current_innings: int
    :type toss_result: unicode
    :type innings_stats: list[CricketInningsStats]
    :type batting_team_stats: CricketBattingTeamStats
    :type bowling_team_stats: CricketBowlingTeamStats
    """

    def __init__(self, **kwargs):
        self.current_innings = kwargs.get("currentInnings")
        self.toss_result = kwargs.get("tossResult")
        self.innings_stats = [
            CricketInningsStats(**entry)
            for entry in kwargs.get("inningsStats") or []
        ]
        # Nested team stats are optional; wrap only when present.
        batting = kwargs.get("battingTeamStats")
        self.batting_team_stats = (
            CricketBattingTeamStats(**batting) if batting else None
        )
        bowling = kwargs.get("bowlingTeamStats")
        self.bowling_team_stats = (
            CricketBowlingTeamStats(**bowling) if bowling else None
        )
class CricketInningsStats:
    """Per-innings summary line for a cricket match.

    :type innings_num: int
    :type batting_team: unicode
    :type bowling_team: unicode
    :type innings_runs: int
    :type innings_overs: unicode
    :type innings_wickets: int
    """

    def __init__(self, **kwargs):
        # camelCase payload key -> snake_case attribute (missing -> None).
        for attr, key in (
            ("innings_num", "inningsNum"),
            ("batting_team", "battingTeam"),
            ("bowling_team", "bowlingTeam"),
            ("innings_runs", "inningsRuns"),
            ("innings_overs", "inningsOvers"),
            ("innings_wickets", "inningsWickets"),
        ):
            setattr(self, attr, kwargs.get(key))
class CricketBattingTeamStats:
    """Current batting pair statistics for a cricket match.

    :type team_name: unicode
    :type bat_1_name: int
    :type bat_1_runs: int
    :type bat_1_balls: int
    :type bat_1_fours: int
    :type bat_1_sixes: int
    :type bat_1_strike: int
    :type bat_2_name: int
    :type bat_2_runs: int
    :type bat_2_balls: int
    :type bat_2_fours: int
    :type bat_2_sixes: int
    :type bat_2_strike: int
    """

    def __init__(self, **kwargs):
        self.team_name = kwargs.get("teamName")
        # bat_1_*/bat_2_* attributes mirror the bat1*/bat2* payload keys.
        for n in ("1", "2"):
            for stat in ("Name", "Runs", "Balls", "Fours", "Sixes", "Strike"):
                setattr(
                    self,
                    "bat_{}_{}".format(n, stat.lower()),
                    kwargs.get("bat{}{}".format(n, stat)),
                )
class CricketBowlingTeamStats:
    """Current bowling pair statistics for a cricket match.

    :type team_name: unicode
    :type bowl_1_name: unicode
    :type bowl_1_overs: unicode
    :type bowl_1_runs: int
    :type bowl_1_maidens: int
    :type bowl_1_wickets: int
    :type bowl_2_name: unicode
    :type bowl_2_overs: unicode
    :type bowl_2_runs: int
    :type bowl_2_maidens: int
    :type bowl_2_wickets: int
    """

    def __init__(self, **kwargs):
        self.team_name = kwargs.get("teamName")
        # bowl_1_*/bowl_2_* attributes mirror the bowl1*/bowl2* payload keys.
        for n in ("1", "2"):
            for stat in ("Name", "Overs", "Runs", "Maidens", "Wickets"):
                setattr(
                    self,
                    "bowl_{}_{}".format(n, stat.lower()),
                    kwargs.get("bowl{}{}".format(n, stat)),
                )
class IncidentListWrapper:
    """Wrapper around the incident list of a cricket match payload.

    :type incident_list: list[CricketIncident]
    """

    def __init__(self, **kwargs):
        raw = kwargs.get("incidentList") or []
        self.incident_list = [CricketIncident(**entry) for entry in raw]
class CricketIncident:
    """Single in-match incident (wicket, boundary, etc.).

    :type participant_ref: unicode
    :type incident_type: unicode
    :type value: unicode
    :type innings: int
    :type overs: unicode
    :type actual_time: int
    """

    def __init__(self, **kwargs):
        # camelCase payload key -> snake_case attribute (missing -> None).
        for attr, key in (
            ("participant_ref", "participantRef"),
            ("incident_type", "incidentType"),
            ("value", "value"),
            ("innings", "innings"),
            ("overs", "overs"),
            ("actual_time", "actualTime"),
        ):
            setattr(self, attr, kwargs.get(key))
class CricketMatch(BaseResource):
    """Cricket match payload received on the cricket stream.

    :type event_id: unicode
    :type market_id: unicode
    :type fixture_info: CricketFixtureInfo
    :type home_team: CricketTeamInfo
    :type away_team: CricketTeamInfo
    :type match_stats: CricketMatchStats
    :type incident_list_wrapper: IncidentListWrapper
    """

    def __init__(self, **kwargs):
        # Streaming bookkeeping fields are popped so BaseResource never
        # sees them as payload data.
        self.streaming_unique_id = kwargs.pop("streaming_unique_id", None)
        self.streaming_update = kwargs.pop("streaming_update", None)
        self.streaming_snap = kwargs.pop("streaming_snap", False)
        pt = kwargs.get("pt")
        self.publish_time_epoch = pt
        self.publish_time = self.strip_datetime(pt)
        super().__init__(**kwargs)
        self.event_id = kwargs.get("eventId")
        self.market_id = kwargs.get("marketId")
        # Nested structures are optional; wrap only when present.
        fixture = kwargs.get("fixtureInfo")
        self.fixture_info = CricketFixtureInfo(**fixture) if fixture else None
        home = kwargs.get("homeTeam")
        self.home_team = CricketTeamInfo(**home) if home else None
        away = kwargs.get("awayTeam")
        self.away_team = CricketTeamInfo(**away) if away else None
        stats = kwargs.get("matchStats")
        self.match_stats = CricketMatchStats(**stats) if stats else None
        incidents = kwargs.get("incidentListWrapper")
        self.incident_list_wrapper = (
            IncidentListWrapper(**incidents) if incidents else None
        )
| mit | cee5bf23e91f11fe78a972cc1b538333 | 30.492632 | 88 | 0.612608 | 3.506564 | false | false | false | false |
liampauling/betfair | setup.py | 2 | 1423 | import os
from setuptools import setup

# Package root; all paths below are resolved relative to this file.
here = os.path.abspath(os.path.dirname(__file__))
# Runtime requirements are single-sourced from requirements.txt.
with open(os.path.join(here, "requirements.txt")) as f:
    INSTALL_REQUIRES = f.read().splitlines()
# Optional speed-up dependencies, installed via the "speed" extra
# (pip install <package>[speed]).
with open(os.path.join(here, "requirements-speed.txt")) as f:
    extras_require = f.read().splitlines()
EXTRAS_REQUIRE = {
    "speed": extras_require,
}
# Package metadata (__title__, __version__, __author__, ...) lives in
# __version__.py and is executed into this dict so it is single-sourced.
about = {}
with open(os.path.join(here, "betfairlightweight", "__version__.py"), "r") as f:
    exec(f.read(), about)
setup(
    name=about["__title__"],
    version=about["__version__"],
    packages=[
        "betfairlightweight",
        "betfairlightweight.endpoints",
        "betfairlightweight.resources",
        "betfairlightweight.streaming",
    ],
    package_dir={"betfairlightweight": "betfairlightweight"},
    install_requires=INSTALL_REQUIRES,
    extras_require=EXTRAS_REQUIRE,
    url=about["__url__"],
    license=about["__license__"],
    author=about["__author__"],
    author_email="a@unknown.invalid",
    description=about["__description__"],
    classifiers=[
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    test_suite="tests",
)
| mit | 2c6a34831ac1ddcb5b82b2e10125ce30 | 29.934783 | 80 | 0.616304 | 3.639386 | false | false | true | false |
liampauling/betfair | betfairlightweight/resources/racecardresources.py | 2 | 10419 | from .baseresource import BaseResource
class RaceType:
    """Race type descriptor (abbreviation, full name and lookup key).

    :type abbr: unicode
    :type full: unicode
    :type key: unicode
    """

    def __init__(self, abbr, full, key):
        # Store the three descriptor fields verbatim.
        self.abbr, self.full, self.key = abbr, full, key
class RaceClassification:
    """Race classification descriptor.

    :type classification: unicode
    :type classification_abbr: unicode
    :type code: unicode
    :type display_name: unicode
    :type display_name_abbr: unicode
    """

    def __init__(
        self,
        code=None,
        displayName=None,
        displayNameAbbr=None,
        classification=None,
        classificationAbbr=None,
    ):
        # camelCase constructor kwargs -> snake_case attributes.
        self.classification = classification
        self.classification_abbr = classificationAbbr
        self.code = code
        self.display_name = displayName
        self.display_name_abbr = displayNameAbbr
class Market:
    """Betfair market attached to a race.

    :type market_id: unicode
    :type market_type: unicode
    :type number_of_winners: int
    """

    def __init__(self, marketId, marketType, numberOfWinners):
        # camelCase constructor args -> snake_case attributes.
        self.market_id = marketId
        self.market_type = marketType
        self.number_of_winners = numberOfWinners
class Going:
    """Going (ground conditions) descriptor.

    :type abbr: unicode
    :type full: unicode
    :type key: unicode
    """

    def __init__(self, abbr=None, full=None, key=None):
        # Store the three descriptor fields verbatim.
        self.abbr, self.full, self.key = abbr, full, key
class Course:
    """Racecourse descriptor.

    :type country: unicode
    :type country_code: unicode
    :type course_id: unicode
    :type course_type: unicode
    :type name: unicode
    :type surface_type: unicode
    :type timeform_course_code: unicode
    :type timezone: unicode
    """

    def __init__(
        self,
        country,
        countryCode,
        courseId,
        name,
        surfaceType=None,
        timeformCourseCode=None,
        timezone=None,
        courseType=None,
    ):
        # Required identification fields.
        self.country = country
        self.country_code = countryCode
        self.course_id = courseId
        self.name = name
        # Optional descriptive fields (default None).
        self.course_type = courseType
        self.surface_type = surfaceType
        self.timeform_course_code = timeformCourseCode
        self.timezone = timezone
class Race(BaseResource):
    """Race entry within a racecard payload.

    :type betfair_meeting_id: unicode
    :type course: Course
    :type distance: int
    :type eligibility: unicode
    :type going: Going
    :type is_results_available: bool
    :type markets: list[Market]
    :type meeting_going: unicode
    :type meeting_id: unicode
    :type number_of_runners: int
    :type race_class: int
    :type race_classification: RaceClassification
    :type race_id: unicode
    :type race_title: unicode
    :type race_type: RaceType
    :type start_date: datetime.datetime
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Scalar fields: camelCase payload key -> snake_case attribute.
        for attr, key in (
            ("betfair_meeting_id", "betfairMeetingId"),
            ("distance", "distance"),
            ("eligibility", "eligibility"),
            ("is_result_available", "isResultAvailable"),
            ("meeting_going", "meetingGoing"),
            ("meeting_id", "meetingId"),
            ("number_of_runners", "numberOfRunners"),
            ("race_class", "raceClass"),
            ("race_id", "raceId"),
            ("race_id_exchange", "raceIdExchange"),
            ("race_title", "raceTitle"),
        ):
            setattr(self, attr, kwargs.get(key))
        self.start_date = (
            self.strip_datetime(kwargs.get("startDate"))
            if "startDate" in kwargs
            else None
        )
        # Nested structures wrapped in their resource classes when present.
        self.course = Course(**kwargs.get("course")) if "course" in kwargs else None
        self.going = Going(**kwargs.get("going")) if "going" in kwargs else None
        self.markets = [Market(**m) for m in kwargs.get("markets") or []]
        self.race_classification = (
            RaceClassification(**kwargs.get("raceClassification"))
            if "raceClassification" in kwargs
            else None
        )
        self.race_type = (
            RaceType(**kwargs.get("raceType")) if "raceType" in kwargs else None
        )
class InPlayHint:
    """Name/value hint shown while a race is in play.

    :type hint_name: unicode
    :type hint_value: unicode
    """

    def __init__(self, hintName, hintValue):
        # Keep the raw name/value pair verbatim.
        self.hint_name, self.hint_value = hintName, hintValue
class DaysSinceLastRun:
    """Days since a runner's last run of a given type.

    :type days: int
    :type type: unicode
    """

    def __init__(self, days, type):
        # NOTE: "type" shadows the builtin but mirrors the API payload key,
        # so it cannot be renamed without breaking **payload construction.
        self.days = days
        self.type = type
class Jockey:
    """Jockey riding a runner.

    :type jockey_id: unicode
    :type name: unicode
    """

    def __init__(self, jockeyId, name, allowance=None):
        # camelCase constructor args -> snake_case attributes.
        self.jockey_id, self.name, self.allowance = jockeyId, name, allowance
class Selection:
    """Exchange selection linked to a runner.

    :type market_id: unicode
    :type market_type: unicode
    :type selection_id: unicode
    """

    def __init__(self, marketId=None, marketType=None, selectionId=None, bsp=None):
        # camelCase constructor kwargs -> snake_case attributes.
        self.market_id, self.market_type = marketId, marketType
        self.selection_id, self.bsp = selectionId, bsp
class Trainer:
    """Trainer of a runner.

    :type location: unicode
    :type name: unicode
    :type trainer_id: unicode
    """

    def __init__(self, location, name, trainerId):
        # camelCase constructor args -> snake_case attributes.
        self.location, self.name, self.trainer_id = location, name, trainerId
class Wearing:
    """Equipment descriptor (blinkers, visor, etc.) worn by a runner.

    :type abbr: unicode
    :type full: unicode
    :type key: unicode
    """

    def __init__(self, abbr, full, key):
        # Store the three descriptor fields verbatim.
        self.abbr, self.full, self.key = abbr, full, key
class PreRaceMasterRating:
    """Pre-race master rating (symbol + value); unknown kwargs are ignored."""

    def __init__(self, ratingSymbol=None, ratingValue=None, **kwargs):
        del kwargs  # tolerate extra payload fields
        self.rating_symbol = ratingSymbol
        self.rating_value = ratingValue
class PreRaceWeightAdjustedMasterRating:
    """Weight-adjusted pre-race master rating; unknown kwargs are ignored."""

    def __init__(self, ratingValue=None, **kwargs):
        del kwargs  # tolerate extra payload fields
        self.rating_value = ratingValue
class SeasonMasterRating:
    """Season master rating per racing type/surface; extra kwargs ignored."""

    def __init__(
        self,
        racingType=None,
        surface=None,
        ratingSymbol=None,
        ratingvalue=None,
        **kwargs
    ):
        del kwargs  # tolerate extra payload fields
        # NOTE: "ratingvalue" (lower-case v) mirrors the payload key casing;
        # renaming it would break keyword construction from the response.
        self.racing_type = racingType
        self.surface = surface
        self.rating_symbol = ratingSymbol
        self.rating_value = ratingvalue
class Runner:
    """Runner entry within a racecard payload.

    :type age: int
    :type comment: unicode
    :type days_since_last_run: DaysSinceLastRun
    :type draw: int
    :type gender: unicode
    :type horse_id: unicode
    :type is_non_runner: bool
    :type jockey: Jockey
    :type long_handicap: int
    :type name: unicode
    :type official_rating: int
    :type owner_colours: unicode
    :type recent_form: unicode
    :type saddle_cloth: unicode
    :type selections: list[Selection]
    :type star_rating: int
    :type timeform_123_place: int
    :type trainer: Trainer
    :type wearing: Wearing
    :type weight: int
    :type wins_at_course: int
    :type wins_at_course_and_distance: int
    :type wins_at_distance: int
    """

    def __init__(
        self,
        age=None,
        draw=None,
        gender=None,
        horseId=None,
        longHandicap=None,
        name=None,
        ownerColours=None,
        saddleCloth=None,
        weight=None,
        selections=None,
        trainer=None,
        jockey=None,
        starRating=None,
        comment=None,
        isNonRunner=False,
        winsAtCourse=None,
        winsAtCourseAndDistance=None,
        winsAtDistance=None,
        daysSinceLastRun=None,
        timeform123Place=None,
        officialRating=None,
        recentForm=None,
        wearing=None,
        preRaceMasterRating=None,
        preRaceWeightAdjustedMasterRating=None,
        seasonMasterRating=None,
        **kwargs
    ):
        # Scalar fields: camelCase constructor kwargs -> snake_case attrs.
        self.age = age
        self.comment = comment
        self.draw = draw
        self.gender = gender
        self.horse_id = horseId
        self.is_non_runner = isNonRunner
        self.long_handicap = longHandicap
        self.name = name
        self.official_rating = officialRating
        self.owner_colours = ownerColours
        self.recent_form = recentForm
        self.saddle_cloth = saddleCloth
        self.star_rating = starRating
        self.timeform_123_place = timeform123Place
        self.weight = weight
        self.wins_at_course = winsAtCourse
        self.wins_at_course_and_distance = winsAtCourseAndDistance
        self.wins_at_distance = winsAtDistance
        # Nested payload structures are wrapped in their resource classes
        # when present; list-valued fields default to empty lists.
        self.days_since_last_run = (
            [DaysSinceLastRun(**i) for i in daysSinceLastRun]
            if daysSinceLastRun
            else []
        )
        self.jockey = Jockey(**jockey) if jockey else None
        self.selections = [Selection(**i) for i in selections] if selections else []
        self.trainer = Trainer(**trainer) if trainer else None
        self.wearing = Wearing(**wearing) if wearing else None
        self.pre_race_master_rating = (
            PreRaceMasterRating(**preRaceMasterRating) if preRaceMasterRating else None
        )
        self.pre_race_weight_adjusted_master_rating = (
            PreRaceWeightAdjustedMasterRating(**preRaceWeightAdjustedMasterRating)
            if preRaceWeightAdjustedMasterRating
            else None
        )
        self.season_master_rating = (
            SeasonMasterRating(**seasonMasterRating) if seasonMasterRating else None
        )
class RaceCard(BaseResource):
    """Racecard payload for a single race.

    :type betting_forecast_text: unicode
    :type comment: unicode
    :type in_play_hints: InPlayHint
    :type minimum_weight: int
    :type number_of_non_runners: int
    :type number_of_runners: int
    :type prize: unicode
    :type race: Race
    :type runners: list[Runner]
    :type timeform_123_text: unicode
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Scalar fields: camelCase payload key -> snake_case attribute.
        for attr, key in (
            ("betting_forecast_text", "bettingForecastText"),
            ("comment", "comment"),
            ("minimum_weight", "minimumWeight"),
            ("number_of_non_runners", "numberOfNonRunners"),
            ("number_of_runners", "numberOfRunners"),
            ("prize", "prize"),
            ("timeform_123_text", "timeform123Text"),
            ("race_id_exchange", "raceIdExchange"),
        ):
            setattr(self, attr, kwargs.get(key))
        self.in_play_hints = [InPlayHint(**h) for h in kwargs.get("inPlayHints") or []]
        # "race" and "runners" are required parts of the payload.
        self.race = Race(**kwargs.get("race"))
        self.runners = [Runner(**r) for r in kwargs.get("runners")]
missionpinball/mpf-mc | mpfmc/widgets/ellipse.py | 1 | 2531 | """An ellipse widget."""
from typing import Optional
from kivy.graphics.vertex_instructions import Ellipse as KivyEllipse
from kivy.graphics.context_instructions import Color, Rotate, Scale
from kivy.properties import NumericProperty
from mpfmc.uix.widget import Widget
MYPY = False
if MYPY: # pragma: no cover
from mpfmc.core.mc import MpfMc # pylint: disable-msg=cyclic-import,unused-import
class Ellipse(Widget):
    """An ellipse widget."""

    widget_type_name = 'Ellipse'
    # Properties that can be targeted by MPF animations.
    animation_properties = ('x', 'y', 'width', 'pos', 'height', 'size', 'color',
                            'angle_start', 'angle_end', 'opacity', 'rotation', 'scale')
    merge_settings = ('width', 'height')

    def __init__(self, mc: "MpfMc", config: dict, key: Optional[str] = None, **kwargs) -> None:
        """Initialise the ellipse and draw it for the first time."""
        del kwargs
        super().__init__(mc=mc, config=config, key=key)

        # Bind to all properties that when changed need to force
        # the widget to be redrawn
        self.bind(pos=self._draw_widget,
                  size=self._draw_widget,
                  color=self._draw_widget,
                  rotation=self._draw_widget,
                  scale=self._draw_widget,
                  segments=self._draw_widget,
                  angle_start=self._draw_widget,
                  angle_end=self._draw_widget)

        self._draw_widget()

    def _draw_widget(self, *args) -> None:
        """Clear the canvas and redraw the ellipse with current properties."""
        del args

        if self.canvas is None:
            return

        # Rotation and scale pivot around the widget's anchor point.
        anchor = (self.x - self.anchor_offset_pos[0], self.y - self.anchor_offset_pos[1])
        self.canvas.clear()

        with self.canvas:
            Color(*self.color)
            Rotate(angle=self.rotation, origin=anchor)
            Scale(self.scale).origin = anchor
            KivyEllipse(pos=self.pos, size=self.size,
                        segments=self.segments,
                        angle_start=self.angle_start,
                        angle_end=self.angle_end)

    #
    # Properties
    #

    segments = NumericProperty(180)
    '''Defines how many segments will be used for drawing the ellipse. The
    drawing will be smoother if you have many segments.
    '''

    angle_start = NumericProperty(0)
    '''Specifies the starting angle, in degrees, of the disk portion of
    the ellipse.
    '''

    angle_end = NumericProperty(360)
    '''Specifies the ending angle, in degrees, of the disk portion of
    the ellipse.
    '''

    # Rotation angle in degrees, applied around the anchor point.
    rotation = NumericProperty(0)
    # Uniform scale factor, applied around the anchor point.
    scale = NumericProperty(1.0)
widget_classes = [Ellipse]
| mit | 6f28fb22b92ae67e012ea9da55e757c2 | 30.246914 | 95 | 0.596997 | 3.917957 | false | false | false | false |
missionpinball/mpf-mc | mpfmc/core/keyboard.py | 1 | 7430 | """Handles key strokes in the media manager."""
import logging
from kivy.core.window import Window
from kivy.uix.widget import Widget as KivyWidget
class Keyboard(KivyWidget):
"""Handles key strokes in the media manager."""
def __init__(self, mc, **kwargs):
    """Build the key map from the machine config's 'keyboard' section."""
    super().__init__(**kwargs)

    self.mc = mc
    self.log = logging.getLogger("Keyboard")

    self.keyboard_events = list()
    # Maps "key-mod1-mod2" strings to a switch name or an event dict.
    self.key_map = dict()
    # Key strings that act as push-on/push-off toggles.
    self.toggle_keys = set()
    # Switch names whose state is reported inverted.
    self.inverted_keys = list()
    # Currently held keys (used to ignore auto-repeat / toggle state).
    self.active_keys = dict()
    self.debug = False

    # todo need to change the parsing logic to make these work
    self.special_keys = {
        'equals': '=',
        'minus': '-',
        'dash': '-',
        'leftbracket': '[',
        'rightbracket': ']',
        'backslash': '\\',
        'apostrophe': "'",
        'semicolon': ';',
        'colon': ':',
        'comma': ',',
        'period': '.',
        'slash': '/',
        'question': '?',
    }

    self.keyboard = Window.request_keyboard(callback=None, target=self)
    self.keyboard.bind(on_key_down=self._on_keyboard_down,
                       on_key_up=self._on_keyboard_up)

    for k, v in self.mc.machine_config['keyboard'].items():
        # A bare 'debug: true' entry enables key logging instead of
        # defining a mapping.
        if k == 'debug' and v:
            self.debug = True
            continue
        k = str(k)  # k is the value of the key entry in the config
        switch_name = v.get('switch', None)
        # set whether a key is the push on / push off type
        toggle_key = v.get('toggle', None)
        invert = v.get('invert', None)
        event = v.get('event', None)
        mc_event = v.get('mc_event', None)
        params = v.get('params', None)
        # todo add args processing?

        # Process the key map entry: accept '+' or '-' as the modifier
        # separator; the last segment is the key, the rest are modifiers.
        k = k.replace('+', '-').lower().split('-')
        key = k[-1]
        mods = k[:-1]
        if mods:
            mods = sorted(mods)

        # What happens when it's pressed?
        if switch_name:  # We're processing a key entry for a switch
            if invert:
                self.inverted_keys.append(switch_name)
            self.add_key_map(key, mods, switch_name, toggle_key)
        elif event:  # we're processing an entry for an event
            event_dict = {'event': event, 'params': params}
            self.add_key_map(key, mods, event_dict=event_dict)
        elif mc_event:  # we're processing an entry for an mc_event
            event_dict = {'mc_event': mc_event, 'params': params}
            self.add_key_map(key, mods, event_dict=event_dict)
@staticmethod
def get_key_string(key: str, mods: [str]) -> str:
"""Return string for key + modifiers.
Args:
key: string of key
mods: list of modifiers as string
Returns: string
"""
try:
mods = sorted(mods)
except AttributeError:
pass
return '{}-{}'.format(key, '-'.join(mods))
# pylint: disable-msg=too-many-arguments
def add_key_map(self, key, mods, switch_name=None, toggle_key=False,
event_dict=None):
"""Add an entry to the key_map which is used to see what to do when key events are received.
Args:
key: The character or name of the key
mods: List of strings for modifier keys for this entry
switch_name: String name of the switch this key combination is tied
to.
toggle_key: Boolean as to whether this key should be a toggle key.
(i.e. push on / push off).
event_dict: Dictionary of events with parameters that will be
posted when this key combination is pressed. Default is None.
"""
key_string = self.get_key_string(key, mods)
if switch_name:
self.key_map[key_string] = switch_name
elif event_dict:
self.key_map[key_string] = event_dict
if toggle_key:
self.toggle_keys.add(key_string)
def _on_keyboard_up(self, keyboard, keycode):
del keyboard
key = keycode[1]
self.process_key_release(key)
return True
def _on_keyboard_down(self, keyboard, keycode, text, modifiers):
del keyboard
del text
key = keycode[1]
# only consider numlock modifier for keys on the numlock field
if "numlock" in modifiers and not key.startswith("num"):
modifiers.remove("numlock")
if self.debug:
if modifiers:
print("KEYS: {}+{}".format('+'.join(modifiers), key))
else:
print("KEYS: {}".format(key))
if key in self.active_keys:
return True
else:
return self.process_key_down(key, modifiers)
def process_key_down(self, key, mods):
"""Process a key down event and change switches accordingly."""
key_string = self.get_key_string(key, mods)
self.log.debug("Processing key stroke for key %s", key_string)
if key_string not in self.key_map:
return False
if key_string in self.toggle_keys: # is this is a toggle key?
self.active_keys[key] = None
self.send_switch(state=-1, name=self.key_map[key_string])
else:
# do we have an event or a switch?
if isinstance(self.key_map[key_string], str): # switch
if self.key_map[key_string] in self.inverted_keys:
self.send_switch(state=0, name=self.key_map[key_string])
self.active_keys[key] = ''.join(('-',
self.key_map[key_string]))
else:
self.send_switch(state=1, name=self.key_map[key_string])
self.active_keys[key] = self.key_map[key_string]
elif isinstance(self.key_map[key_string], dict): # event
event_dict = self.key_map[key_string]
event_params = event_dict['params'] or {}
if 'event' in event_dict:
self.mc.post_mc_native_event(str(event_dict['event']),
**event_params)
elif 'mc_event' in event_dict:
self.log.warning("The keyboard setting 'mc_event:' will "
"go away soon since now regular 'event:'"
"entries are posted to the MC and MPF.")
# todo deprecate this
self.mc.events.post(event_dict['mc_event'],
**event_params)
return True
def process_key_release(self, key):
"""Process a key up event and change switches accordingly."""
action = self.active_keys.pop(key, None)
if action:
if action.startswith('-'):
self.send_switch(state=1, name=action[1:])
else:
self.send_switch(state=0, name=action)
def send_switch(self, name, state):
"""Notify mpf via BCP about a switch change."""
if self.mc.bcp_client_connected:
self.mc.bcp_processor.send('switch', name=name, state=state)
| mit | b9f0a016e1466149caf419cfa857407e | 34.550239 | 100 | 0.520996 | 4.176504 | false | false | false | false |
missionpinball/mpf-mc | mpfmc/core/bcp_server.py | 1 | 9542 | """BCP Server interface for the MPF Media Controller"""
import logging
import queue
import socket
import sys
import threading
import time
import traceback
import select
import mpf.core.bcp.bcp_socket_client as bcp
from mpf.exceptions.runtime_error import MpfRuntimeError
class BCPServer(threading.Thread):

    """Parent class for the BCP Server thread.

    Accepts a single BCP client connection, reads newline-delimited BCP
    commands into ``receiving_queue``, and writes outgoing commands from
    ``sending_queue`` (as ``(msg, rawbytes)`` tuples) via a dedicated
    sending thread.

    Args:
        mc: A reference to the main MediaController instance.
        receiving_queue: A shared Queue() object which holds incoming BCP
            commands.
        sending_queue: A shared Queue() object which holds outgoing BCP
            commands.
    """

    def __init__(self, mc, receiving_queue, sending_queue):
        threading.Thread.__init__(self)
        self.mc = mc
        self.log = logging.getLogger('MPF-MC BCP Server')
        self.receive_queue = receiving_queue
        self.sending_queue = sending_queue
        self.connection = None
        self.socket = None
        self.done = False

        self.setup_server_socket(mc.machine_config['mpf-mc']['bcp_interface'],
                                 mc.machine_config['mpf-mc']['bcp_port'])

        # outgoing traffic runs on its own daemon thread so a slow client
        # can never block the listener loop
        self.sending_thread = threading.Thread(target=self.sending_loop)
        self.sending_thread.daemon = True
        self.sending_thread.start()

    def setup_server_socket(self, interface='localhost', port=5050):
        """Sets up the socket listener.

        Args:
            interface: String name of which interface this socket will listen
                on.
            port: Integer TCP port number the socket will listen on.

        Raises:
            MpfRuntimeError: If the address cannot be bound (e.g. the port
                is already in use).
        """
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # allow quick restarts without waiting for TIME_WAIT to expire
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.log.info('Starting up on %s port %s', interface, port)
        try:
            self.socket.bind((interface, port))
        except IOError as e:
            raise MpfRuntimeError("Failed to bind BCP Socket to {} on port {}. "
                                  "Is there another application running on that port?".format(interface, port), 1,
                                  self.log.name) from e

        self.socket.listen(5)
        # short accept() timeout so the loop can notice thread_stopper
        self.socket.settimeout(1)

    def _connect(self):
        """Block until a client connects.

        Returns True once a client is connected, False if the thread
        should exit instead (shutdown requested or production timeout).
        """
        self.log.info("Waiting for a connection...")
        # Since posting an event from a thread is not safe, we just
        # drop the event we want into the receive queue and let the
        # main loop pick it up
        self.receive_queue.put(('trigger',
                                {'name': 'client_disconnected',
                                 'host': self.socket.getsockname()[0],
                                 'port': self.socket.getsockname()[1]}))
        '''event: client_disconnected
        desc: Posted on the MPF-MC only (e.g. not in MPF) when the BCP
        client disconnects. This event is also posted when the MPF-MC
        starts before a client is connected.

        This is useful for triggering a slide notifying of the
        disconnect.

        args:
        host: The hostname or IP address that the socket is listening
        on.
        port: The port that the socket is listening on.
        '''
        self.mc.bcp_client_connected = False
        start_time = time.time()
        while (not self.connection and
                not self.mc.thread_stopper.is_set()):
            try:
                self.connection, client_address = self.socket.accept()
            except (socket.timeout, OSError):
                # in production mode, give up after 30s without a client
                if self.mc.options['production'] and start_time + 30 < time.time():
                    self.log.warning("Timeout while waiting for connection. Stopping!")
                    self.mc.stop()
                    return False

            if self.mc.thread_stopper.is_set():
                self.log.info("Stopping BCP listener thread")
                return False

        self.log.info("Received connection from: %s:%s",
                      client_address[0], client_address[1])
        # Since posting an event from a thread is not safe, we just
        # drop the event we want into the receive queue and let the
        # main loop pick it up
        self.receive_queue.put(('trigger',
                                {'name': 'client_connected',
                                 'host': client_address[0],
                                 'port': client_address[1]}))
        '''event: client_connected
        desc: Posted on the MPF-MC only when a BCP client has
        connected.

        args:
        address: The IP address of the client that connected.
        port: The port the client connected on.
        '''
        self.mc.bcp_client_connected = True
        return True

    def run(self):
        """The socket thread's run loop."""
        try:
            while not self.mc.thread_stopper.is_set():
                if not self._connect():
                    return

                socket_chars = b''
                if sys.platform in ("linux", "darwin"):
                    # poll() avoids select()'s FD limit on these platforms
                    poller = select.poll()
                    poller.register(self.connection, select.POLLIN)

                # Receive the data in small chunks and retransmit it
                while not self.mc.thread_stopper.is_set():
                    if sys.platform in ("linux", "darwin"):
                        ready = poller.poll(None)
                    else:
                        ready = select.select([self.connection], [], [], 1)
                    if ready[0]:
                        try:
                            data_read = self.connection.recv(8192)
                        except socket.timeout:
                            # BUGFIX: previously this fell through with
                            # ``data_read`` unbound (first pass) or stale;
                            # just wait for the next poll cycle instead.
                            continue
                        if data_read:
                            socket_chars += data_read
                            commands = socket_chars.split(b"\n")
                            # keep last incomplete command
                            socket_chars = commands.pop()
                            # process all complete commands
                            self._process_receives_messages(commands)
                        else:
                            # no bytes -> socket closed
                            break

                # close connection. while loop will not exit if this is not intended.
                self.connection.close()
                self.connection = None

            # always exit
            self.mc.stop()
            return
        except Exception:   # noqa
            # forward the formatted traceback to the main thread so the
            # crash is not silently swallowed
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            msg = ''.join(line for line in lines)
            self.mc.crash_queue.put(msg)

    def _process_receives_messages(self, commands):
        # process all complete commands
        for cmd in commands:
            if cmd:
                try:
                    decoded_cmd = cmd.strip().decode()
                except UnicodeDecodeError:
                    self.log.warning("Failed to decode BCP message: %s", cmd.strip())
                    continue
                self.process_received_message(decoded_cmd)

    def stop(self):
        """Stops and shuts down the BCP server."""
        if not self.done:
            self.log.info("Socket thread stopping.")
            # BUGFIX: the sending loop unpacks ``msg, rawbytes`` tuples,
            # but this previously called put('goodbye', None), i.e. a bare
            # string with block=None, which raised ValueError when unpacked.
            self.sending_queue.put(('goodbye', None))
            time.sleep(1)  # give it a chance to send goodbye before quitting
            self.done = True
            self.mc.done = True

    def sending_loop(self):
        """Sending loop which transmits data from the sending queue to the
        remote socket.

        This method is run as a thread.
        """
        try:
            while not self.done and not self.mc.thread_stopper.is_set():
                try:
                    msg, rawbytes = self.sending_queue.get(block=True,
                                                           timeout=1)
                except queue.Empty:
                    if self.mc.thread_stopper.is_set():
                        self.log.info("Stopping BCP sending thread")
                        self.socket.shutdown(socket.SHUT_RDWR)
                        self.socket.close()
                        self.socket = None
                        return
                    else:
                        continue

                if not rawbytes:
                    self.connection.sendall(('{}\n'.format(msg)).encode('utf-8'))
                else:
                    # announce the byte payload length in the command, then
                    # send the raw bytes immediately after
                    self.connection.sendall('{}&bytes={}\n'.format(
                        msg, len(rawbytes)).encode('utf-8'))
                    self.connection.sendall(rawbytes)
        except Exception:   # noqa
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            msg = ''.join(line for line in lines)
            self.mc.crash_queue.put(msg)
            # todo this does not crash mpf-mc

    def process_received_message(self, message):
        """Puts a received BCP message into the receiving queue.

        Args:
            message: The incoming BCP message

        Raises:
            ValueError: If the message cannot be decoded as a BCP command.
        """
        self.log.debug('Received "%s"', message)

        try:
            cmd, kwargs = bcp.decode_command_string(message)
            self.receive_queue.put((cmd, kwargs))
        except ValueError:
            self.log.error("DECODE BCP ERROR. Message: %s", message)
            raise
| mit | b3022f558c6d7a666f325ba1d39bcdcd | 35.7 | 114 | 0.521798 | 4.751992 | false | false | false | false |
missionpinball/mpf-mc | mpfmc/effects/color_dmd.py | 1 | 3949 | from typing import List
from kivy.properties import NumericProperty, ListProperty, BooleanProperty
from mpfmc.uix.effects import EffectsChain
from mpfmc.effects.dot_filter import DotFilterEffect
from mpfmc.effects.gain import GainEffect
from mpfmc.effects.reduce import ReduceEffect
MYPY = False
if MYPY: # pragma: no cover
from kivy.uix.effectwidget import EffectBase # pylint: disable-msg=cyclic-import,unused-import
class ColorDmdEffect(EffectsChain):
    """GLSL effect to render an on-screen DMD to look like individual round pixels.

    Builds a chain of (optional) dot-filter, shade-reduction and gain
    effects via :meth:`get_effects`.
    """

    dot_filter = BooleanProperty(True)
    '''
    Sets whether or not to apply the dot filter effect.

    dot_filter is a :class:`~kivy.properties.BooleanProperty` and
    defaults to True.
    '''

    width = NumericProperty(128)
    '''
    Sets the width in pixels of the display widget on the screen. Typically
    this is larger than the dots_x parameter.

    width is a :class:`~kivy.properties.NumericProperty` and
    defaults to 128.
    '''

    height = NumericProperty(32)
    '''
    Sets the height in pixels of the display widget on the screen. Typically
    this is larger than the dots_y parameter.

    height is a :class:`~kivy.properties.NumericProperty` and
    defaults to 32.
    '''

    dots_x = NumericProperty(128)
    '''
    Sets the number of dots in the horizontal (x) direction.

    dots_x is a :class:`~kivy.properties.NumericProperty` and
    defaults to 128.
    '''

    dots_y = NumericProperty(32)
    '''
    Sets the number of dots in the vertical (y) direction.

    dots_y is a :class:`~kivy.properties.NumericProperty` and
    defaults to 32.
    '''

    blur = NumericProperty(0.1)
    '''
    Sets the size of the blur around each pixel where it's blended with
    the background. The value is relative to the pixel. (e.g. a value of
    0.1 will add a 10% blur around the edge of each pixel.)

    blur is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0.1.
    '''

    dot_size = NumericProperty(0.5)
    '''
    Sets the size of the circle for the dot/pixel relative to the size of the
    square bounding box of the dot. A size of 1.0 means that the diameter
    of the dot will be the same as its bounding box, in other words a
    size of 1.0 means that the dot will touch each other.

    dot_size is a :class:`~kivy.properties.NumericProperty` and
    defaults to 0.5.
    '''

    background_color = ListProperty([0.1, 0.1, 0.1, 1.0])
    '''
    A four-item tuple or list that represents the color of the space between the
    dots, in RGBA format with individual values as floats between 0.0 - 1.0. If
    you want the background to be transparent, set it to (0.0, 0.0, 0.0, 0.0).

    background_color is a :class:`~kivy.properties.ListProperty` and
    defaults to (0.1, 0.1, 0.1, 1.0) (which is 10% gray with 100% alpha/fully
    opaque).
    '''

    gain = NumericProperty(1.0)
    '''
    Sets the gain factor which is multiplied by each color channel.

    gain is a :class:`~kivy.properties.NumericProperty` and
    defaults to 1.0 (which has no effect).
    '''

    shades = NumericProperty(16)
    '''
    Sets the number of shades per channel to reduce it to.

    shades is a :class:`~kivy.properties.NumericProperty` and
    defaults to 16.
    '''

    def get_effects(self) -> List["EffectBase"]:
        """Assemble the effect chain: dot filter (optional), reduce, gain."""
        effects = []

        if bool(self.dot_filter):
            effects.append(DotFilterEffect(
                width=self.width,
                height=self.height,
                dots_x=self.dots_x,
                dots_y=self.dots_y,
                blur=self.blur,
                dot_size=self.dot_size,
                background_color=self.background_color
            ))

        if self.shades > 0:
            # reduce each channel to a limited number of shades first,
            # then apply the gain multiplier
            effects.append(ReduceEffect(shades=self.shades))
        effects.append(GainEffect(gain=self.gain))

        return effects
# Registration hooks used by the MC effects loader to discover this effect.
effect_cls = ColorDmdEffect
name = 'color_dmd'
| mit | 8f8b2d56cf2d6a8e601ac22c43d6f470 | 29.145038 | 101 | 0.658141 | 3.82655 | false | false | false | false |
missionpinball/mpf-mc | mpfmc/core/assets.py | 1 | 5327 | """Threaded Asset Loader for MC."""
import logging
import threading
import traceback
from queue import PriorityQueue, Queue, Empty
import sys
from mpf.core.assets import BaseAssetManager
from mpf.exceptions.config_file_error import ConfigFileError
class ThreadedAssetManager(BaseAssetManager):
    """AssetManager which uses the Threading module.

    Assets queued via :meth:`load_asset` are loaded from disk by a
    background :class:`AssetLoader` thread; completions are drained back
    on the main thread via a clock callback.
    """

    def __init__(self, machine):
        """Initialise queues and start loader thread."""
        super().__init__(machine)
        self.loader_queue = PriorityQueue()  # assets for to the loader thread
        self.loaded_queue = Queue()  # assets loaded from the loader thread
        self.loader_thread = None
        self._loaded_watcher = False  # clock handle while completions pending
        self._start_loader_thread()

    def _start_loader_thread(self):
        # daemon thread so it never blocks interpreter shutdown
        self.loader_thread = AssetLoader(loader_queue=self.loader_queue,
                                         loaded_queue=self.loaded_queue,
                                         exception_queue=self.machine.crash_queue,
                                         thread_stopper=self.machine.thread_stopper)
        self.loader_thread.daemon = True
        self.loader_thread.start()

    def load_asset(self, asset):
        """Put asset in loader queue."""
        # Internal method which handles the logistics of actually loading an
        # asset. Should only be called by Asset.load() as that method does
        # additional things that are needed.
        self.num_assets_to_load += 1

        # It's ok for an asset to make it onto this queue twice as the loader
        # thread will check the asset's loaded attribute to make sure it needs
        # to load it.

        # This is a PriorityQueue which will automatically put the asset into
        # the proper position in the queue based on its priority.
        self.loader_queue.put(asset)

        if not self._loaded_watcher:
            # poll the loaded queue every frame until everything is done
            self._loaded_watcher = self.machine.clock.schedule_interval(self._check_loader_status, 0)

    def _check_loader_status(self, *args):
        del args
        # checks the loaded queue and updates loading stats
        try:
            while not self.loaded_queue.empty():
                asset, loaded = self.loaded_queue.get()
                if loaded:
                    asset.is_loaded()
                self.num_assets_loaded += 1
                self._post_loading_event()
        except AttributeError:
            # NOTE(review): broad guard with no visible justification in
            # this file -- presumably protects against a partially torn
            # down machine/asset during shutdown; confirm before removing.
            pass

        if self.num_assets_to_load == self.num_assets_loaded:
            # all outstanding loads finished; reset counters and stop polling
            self.num_assets_loaded = 0
            self.num_assets_to_load = 0
            self.machine.clock.unschedule(self._loaded_watcher)
            self._loaded_watcher = None
class AssetLoader(threading.Thread):
    """Base class for the Asset Loader thread and actually loads the assets from disk.

    Args:
        loader_queue: A reference to the asset manager's loader_queue which
            holds assets waiting to be loaded. Items are automatically sorted
            in reverse order by priority, then creation ID.
        loaded_queue: A reference to the asset manager's loaded_queue which
            holds assets that have just been loaded. Entries are Asset
            instances.
        exception_queue: Send a reference to self.machine.crash_queue. This way if
            the asset loader crashes, it will write the crash to that queue and
            cause an exception in the main thread. Otherwise it fails silently
            which is super annoying. :)
        thread_stopper: Event that signals this thread to exit its run loop.
    """

    def __init__(self, loader_queue, loaded_queue, exception_queue,
                 thread_stopper):
        """Initialise asset loader."""
        threading.Thread.__init__(self)
        self.log = logging.getLogger('Asset Loader')
        self.loader_queue = loader_queue
        self.loaded_queue = loaded_queue
        self.exception_queue = exception_queue
        self.thread_stopper = thread_stopper
        self.name = 'asset_loader'

    def run(self):
        """Run loop for the loader thread."""
        try:  # wrap the so we can send exceptions to the main thread
            while not self.thread_stopper.is_set():
                try:
                    # 1s timeout so the stopper event is checked regularly
                    asset = self.loader_queue.get(block=True, timeout=1)
                except Empty:
                    asset = None

                if asset:
                    # lock guards against the same asset being loaded twice
                    # (it may legitimately be queued more than once)
                    with asset.lock:
                        if not asset.loaded:
                            try:
                                asset.do_load()
                            except Exception as e:
                                raise ConfigFileError(
                                    "Error while loading {} asset file '{}'".format(asset.attribute, asset.file),
                                    1, self.log.name, asset.name) from e
                            # True -> freshly loaded; main thread runs
                            # asset.is_loaded() callbacks
                            self.loaded_queue.put((asset, True))
                        else:
                            # False -> was already loaded; only counted
                            self.loaded_queue.put((asset, False))
            return
        # pylint: disable-msg=broad-except
        except Exception:  # pragma: no cover
            # forward the traceback to the main thread's crash queue so the
            # failure is not silent, then re-raise to end this thread
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value,
                                               exc_traceback)
            msg = ''.join(line for line in lines)
            self.exception_queue.put(msg)
            raise
| mit | 22677bce96def22890b76950f7b45a70 | 38.753731 | 113 | 0.57706 | 4.726708 | false | false | false | false |
kronenthaler/mod-pbxproj | mod_pbxproj.py | 1 | 2082 | # MIT License
#
# Copyright (c) 2016 Ignacio Calderon aka kronenthaler
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# This is a backwards-compatibility file. For Unity developers this is the only file it needs to be added to the Unity
# project.
# This file will install the proper python package into the user's python's local space, if it's not present at run-time
# of this script. Afterwards, it will import all necessary modules to the developer to make his/her own script work as
# before.
from setuptools import setup
import site

__author__ = 'kronenthaler'
__version__ = '2.0.1'
__package_name__ = 'mod_pbxproj_installer'

try:
    # check if the real package is already installed
    from pbxproj import XcodeProject
except ImportError:
    # BUGFIX: was a bare ``except:`` which would also swallow unrelated
    # failures (KeyboardInterrupt, SyntaxError in pbxproj, ...); only a
    # missing package should trigger the self-install.
    print('Installing package...')
    setup(name=__package_name__,
          license='MIT License',
          install_requires=['pbxproj'],
          script_args=['install', '--user', '--force', '--record', '.uninstall_files'])

    # force the refresh of the packages
    # NOTE(review): ``reload`` is the Python 2 builtin; Python 3 would need
    # ``importlib.reload`` -- this shim targets Python 2 era Unity tooling.
    reload(site)

# expose import publicly
from pbxproj import *
| mit | b999f0eab104f5742dc2606ad5eb24c6 | 39.823529 | 120 | 0.742555 | 4.14741 | false | false | false | false |
kronenthaler/mod-pbxproj | tests/pbxsections/TestPBXFileReference.py | 1 | 1492 | import unittest
from pbxproj.pbxsections.PBXFileReference import PBXFileReference
class PBXFileReferenceTest(unittest.TestCase):
    """Unit tests for PBXFileReference repr output and file-type setters."""

    def testPrintOnSingleLine(self):
        # a parsed file reference must serialize onto a single line
        obj = {"isa": "PBXFileReference", "name": "something"}
        dobj = PBXFileReference().parse(obj)

        self.assertEqual(dobj.__repr__(), "{isa = PBXFileReference; name = something; }")

    def testSetLastKnownType(self):
        # setting the last-known type must not set an explicit type
        dobj = PBXFileReference.create("path")

        dobj.set_last_known_file_type('something')

        self.assertEqual(dobj.lastKnownFileType, "something")
        self.assertIsNone(dobj['explicitFileType'])

    def testSetExplicityFileType(self):
        # setting the explicit type must not set a last-known type
        dobj = PBXFileReference.create("path")

        dobj.set_explicit_file_type('something')

        self.assertEqual(dobj.explicitFileType, "something")
        self.assertIsNone(dobj['lastKnownFileType'])

    def testSetLastTypeRemovesExplicit(self):
        # the two type fields are mutually exclusive: last-known wins here
        dobj = PBXFileReference.create("path")

        dobj.set_explicit_file_type('something')
        dobj.set_last_known_file_type('something')

        self.assertEqual(dobj.lastKnownFileType, "something")
        self.assertIsNone(dobj['explicitFileType'])

    def testSetExplicitRemovesLastType(self):
        # the two type fields are mutually exclusive: explicit wins here
        dobj = PBXFileReference.create("path")

        dobj.set_last_known_file_type('something')
        dobj.set_explicit_file_type('something')

        self.assertEqual(dobj.explicitFileType, "something")
        self.assertIsNone(dobj['lastKnownFileType'])
| mit | 34c5d24261884f1201b6b9811d84e51d | 32.155556 | 89 | 0.691689 | 4.155989 | false | true | false | false |
ottowayi/pycomm3 | pycomm3/packets/logix.py | 1 | 15290 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2021 Ian Ottoway <ian@ottoway.dev>
# Copyright (c) 2014 Agostino Ruscito <ruscito@gmail.com>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import logging
from itertools import tee, zip_longest
from reprlib import repr as _r
from typing import Dict, Any, Sequence, Union
from ..util import cycle
from .ethernetip import SendUnitDataRequestPacket, SendUnitDataResponsePacket
from .util import parse_read_reply, request_path, tag_request_path
from ..cip import ClassCode, Services, DataTypes, UINT, UDINT, ULINT
from ..const import STRUCTURE_READ_REPLY
from ..exceptions import RequestError
class TagServiceResponsePacket(SendUnitDataResponsePacket):
    """Base response for single-tag services (read/write).

    Carries the originating request's tag metadata so replies can be
    parsed and reported against the correct tag.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, request: "TagServiceRequestPacket", raw_data: bytes = None):
        # NOTE: these attributes are copied *before* super().__init__(),
        # which parses ``raw_data`` -- keep this ordering.
        self.tag = request.tag
        self.elements = request.elements
        self.tag_info = request.tag_info
        super().__init__(request, raw_data)
class TagServiceRequestPacket(SendUnitDataRequestPacket):
    """Base request for CIP services that operate on a single tag."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    response_class = TagServiceResponsePacket
    tag_service = None  # CIP service code; supplied by subclasses

    def __init__(
        self,
        sequence: cycle,
        tag: str,
        elements: int,
        tag_info: Dict[str, Any],
        request_id: int,
        use_instance_id: bool = True,
    ):
        """Store the target tag, its metadata and the element count.

        The request path is not built here; it is created lazily by the
        subclass during message setup (or copied from another request).
        """
        super().__init__(sequence)
        self.request_path = None
        self.request_id = request_id
        self.tag = tag
        self.tag_info = tag_info
        self.elements = elements
        self._use_instance_id = use_instance_id

    def tag_only_message(self):
        """Return the per-tag message: service code, path, element count."""
        element_count = UINT.encode(self.elements)
        return self.tag_service + self.request_path + element_count
class ReadTagResponsePacket(TagServiceResponsePacket):
    """Response to a single (non-fragmented) tag read."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, request: "ReadTagRequestPacket", raw_data: bytes = None):
        self.value = None
        self.data_type = None
        super().__init__(request, raw_data)

    def _parse_reply(self, dont_parse: bool = False):
        """Parse the reply, decoding value/data_type unless ``dont_parse``.

        ``dont_parse`` is used by the fragmented subclass, which must
        accumulate raw bytes across multiple replies before decoding.
        Any failure is recorded on ``_error`` rather than raised.
        """
        try:
            super()._parse_reply()
            if self.is_valid() and not dont_parse:
                self.value, self.data_type = parse_read_reply(
                    self.data, self.tag_info, self.elements
                )
        except Exception as err:
            self.__log.exception("Failed parsing reply data")
            self.value = None
            self._error = f"Failed to parse reply - {err}"

    def __repr__(self):
        return f"{self.__class__.__name__}({self.data_type!r}, {_r(self.value)}, {self.service_status!r})"
class ReadTagRequestPacket(TagServiceRequestPacket):
    """Request to read the value of a single tag."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "read"
    response_class = ReadTagResponsePacket
    tag_service = Services.read_tag

    def _setup_message(self):
        super()._setup_message()
        if self.request_path is None:
            self.request_path = tag_request_path(self.tag, self.tag_info, self._use_instance_id)
            if self.request_path is None:
                self._error = "Failed to build request path for tag"
        # NOTE(review): if the path could not be built, ``request_path`` is
        # still None here and ``tag_only_message()`` would raise when
        # concatenating -- presumably ``error`` is checked before the
        # message is actually sent; confirm against the base class.
        self._msg.append(self.tag_only_message())
class ReadTagFragmentedResponsePacket(ReadTagResponsePacket):
    """One fragment of a fragmented tag read.

    ``_parse_reply`` only separates the encoded data-type header from the
    value bytes; callers accumulate ``value_bytes`` across fragments and
    invoke :meth:`parse_value` once everything has been received.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, request: "ReadTagFragmentedRequestPacket", raw_data: bytes = None):
        self.value = None
        self._data_type = None   # raw encoded data-type header bytes
        self.value_bytes = None  # raw value payload of this fragment
        super().__init__(request, raw_data)

    def _parse_reply(self):
        super()._parse_reply(dont_parse=True)
        if self.data[:2] == STRUCTURE_READ_REPLY:
            # structure replies carry a 4-byte header (marker + handle)
            self.value_bytes = self.data[4:]
            self._data_type = self.data[:4]
        else:
            # atomic replies carry a 2-byte data-type code
            self.value_bytes = self.data[2:]
            self._data_type = self.data[:2]

    def parse_value(self):
        """Decode the accumulated bytes into ``value``/``data_type``.

        Failures are recorded on ``_error`` instead of raised.
        """
        try:
            if self.is_valid():
                self.value, self.data_type = parse_read_reply(
                    self._data_type + self.value_bytes,
                    self.request.tag_info,
                    self.request.elements,
                )
            else:
                self.value, self.data_type = None, None
        except Exception as err:
            self.__log.exception("Failed parsing reply data")
            self.value = None
            self._error = f"Failed to parse reply - {err}"

    def __repr__(self):
        return f"{self.__class__.__name__}(raw_data={_r(self.raw)})"

    __str__ = __repr__
class ReadTagFragmentedRequestPacket(ReadTagRequestPacket):
    """Fragmented tag read; retrieves large values in offset-based chunks."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "read"
    response_class = ReadTagFragmentedResponsePacket
    tag_service = Services.read_tag_fragmented

    def __init__(
        self,
        sequence: cycle,
        tag: str,
        elements: int,
        tag_info: Dict[str, Any],
        request_id: int,
        use_instance_id: bool = True,
        offset: int = 0,
    ):
        """``offset`` is the byte offset into the tag value to resume from."""
        super().__init__(sequence, tag, elements, tag_info, request_id, use_instance_id)
        self.offset = offset

    def _setup_message(self):
        # identical to a plain read plus a trailing 32-bit byte offset
        super()._setup_message()
        self._msg.append(UDINT.encode(self.offset))

    @classmethod
    def from_request(
        cls,
        sequence: cycle,
        request: Union[ReadTagRequestPacket, "ReadTagFragmentedRequestPacket"],
        offset=0,
    ) -> "ReadTagFragmentedRequestPacket":
        """Clone an existing read request as a fragmented read at ``offset``."""
        clone = cls(next(sequence), request.tag, request.elements,
                    request.tag_info, request.request_id,
                    request._use_instance_id, offset)
        # reuse the already-built path instead of rebuilding it
        clone.request_path = request.request_path
        return clone

    def __repr__(self):
        return f"{self.__class__.__name__}(tag={self.tag!r}, elements={self.elements!r})"
class WriteTagResponsePacket(TagServiceResponsePacket):
    """Response to a tag write; echoes the value and type that were sent."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, request: "WriteTagRequestPacket", raw_data: bytes = None):
        # captured before super().__init__() (which parses raw_data) so the
        # response can report what was written
        self.value = request.value
        self.data_type = request.data_type
        super().__init__(request, raw_data)
class WriteTagRequestPacket(TagServiceRequestPacket):
    """Request to write a (pre-encoded) value to a single tag."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "write"
    response_class = WriteTagResponsePacket
    tag_service = Services.write_tag

    def __init__(
        self,
        sequence: cycle,
        tag: str,
        elements: int,
        tag_info: Dict[str, Any],
        request_id: int,
        use_instance_id: bool = True,
        value: bytes = b"",
    ):
        """Store the tag metadata and pre-pack the data-type header.

        Raises:
            RequestError: if the data type is unsupported, or a struct
                value was not supplied as already-packed bytes.
        """
        super().__init__(sequence, tag, elements, tag_info, request_id, use_instance_id)
        self.value = value
        self.data_type = tag_info["data_type_name"]
        self._packed_data_type = None

        if tag_info["tag_type"] == "struct":
            if not isinstance(value, (bytes, bytearray)):
                raise RequestError("Writing UDTs only supports bytes for value")
            # structures are identified by the 0xA002 marker followed by
            # the 16-bit template structure handle
            self._packed_data_type = b"\xA0\x02" + UINT.encode(
                tag_info["data_type"]["template"]["structure_handle"]
            )
        elif self.data_type not in DataTypes:
            raise RequestError(f"Unsupported data type: {self.data_type!r}")
        else:
            self._packed_data_type = UINT.encode(DataTypes[self.data_type].code)

    def _setup_message(self):
        super()._setup_message()
        if self.request_path is None:
            self.request_path = tag_request_path(self.tag, self.tag_info, self._use_instance_id)
            if self.request_path is None:
                # BUGFIX: was ``self.error = f"..."`` -- a placeholder-less
                # f-string assigned to the public ``error`` attribute;
                # failures are recorded on ``_error`` like the sibling
                # ReadTagRequestPacket does.
                self._error = "Failed to build request path for tag"
        self._msg.append(self.tag_only_message())

    def tag_only_message(self):
        """Service + path + packed data type + element count + value bytes."""
        return b"".join(
            (
                self.tag_service,
                self.request_path,
                self._packed_data_type,
                UINT.encode(self.elements),
                self.value,
            )
        )

    def __repr__(self):
        return f"{self.__class__.__name__}(tag={self.tag!r}, value={_r(self.value)}, elements={self.elements!r})"
class WriteTagFragmentedResponsePacket(WriteTagResponsePacket):
    """Response to one fragment of a fragmented tag write."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
class WriteTagFragmentedRequestPacket(WriteTagRequestPacket):
    """Fragmented tag write; sends large values in offset-based chunks."""

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "write"
    response_class = WriteTagFragmentedResponsePacket
    tag_service = Services.write_tag_fragmented

    def __init__(
        self,
        sequence: cycle,
        tag: str,
        elements: int,
        tag_info: Dict[str, Any],
        request_id: int,
        use_instance_id: bool = True,
        offset: int = 0,
        value: bytes = b"",
    ):
        """``offset`` is the byte offset this chunk starts at; ``value`` is
        the chunk's payload (the parent is initialised without it)."""
        super().__init__(sequence, tag, elements, tag_info, request_id, use_instance_id)
        self.offset = offset
        self.value = value

    def tag_only_message(self):
        """Like the plain write message, with the 32-bit offset inserted
        between the element count and the value bytes."""
        header = self.tag_service + self.request_path + self._packed_data_type
        counts = UINT.encode(self.elements) + UDINT.encode(self.offset)
        return header + counts + self.value

    @classmethod
    def from_request(
        cls,
        sequence: cycle,
        request: WriteTagRequestPacket,
        offset: int = 0,
        value: bytes = b"",
    ) -> "WriteTagFragmentedRequestPacket":
        """Clone an existing write request as one fragment at ``offset``."""
        payload = value if value else request.value
        clone = cls(next(sequence), request.tag, request.elements,
                    request.tag_info, request.request_id,
                    request._use_instance_id, offset, payload)
        # reuse the already-built path instead of rebuilding it
        clone.request_path = request.request_path
        return clone
class ReadModifyWriteResponsePacket(WriteTagResponsePacket):
    """Response to a read-modify-write (masked bit write) request."""

    ...
class ReadModifyWriteRequestPacket(SendUnitDataRequestPacket):
    """Atomically set/clear individual bits of an integer tag.

    The controller applies ``new = (old AND and_mask) OR or_mask``:
    setting a bit raises it in the OR mask; clearing a bit lowers it in
    the AND mask.
    """

    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "write"
    response_class = ReadModifyWriteResponsePacket
    tag_service = Services.read_modify_write

    def __init__(
        self,
        sequence: cycle,
        tag: str,
        tag_info: Dict[str, Any],
        request_id: int,
        use_instance_id: bool = True,
    ):
        """Build the request path and identity masks for ``tag``.

        Raises:
            RequestError: if the tag's data type has no fixed size and
                therefore cannot take a bit mask.
        """
        super().__init__(sequence)
        self.tag = tag
        self.value = None
        self.elements = 0
        self.tag_info = tag_info
        self.request_id = request_id
        self._use_instance_id = use_instance_id
        self.data_type = tag_info["data_type_name"]
        self.request_path = tag_request_path(tag, tag_info, use_instance_id)
        self.bits = []
        self._request_ids = []
        # identity masks: AND of all ones / OR of all zeros change nothing
        self._and_mask = 0xFFFF_FFFF_FFFF_FFFF
        self._or_mask = 0x0000_0000_0000_0000

        # BUGFIX: DataTypes.get() returns None for unknown types, which
        # previously raised AttributeError on ``.size`` instead of the
        # intended RequestError below.
        _data_type = DataTypes.get(self.data_type)
        self._mask_size = _data_type.size if _data_type is not None else None

        if self._mask_size is None:
            raise RequestError(f'Invalid data type {tag_info["data_type"]} for writing bits')

        if self.request_path is None:
            # record the failure on ``_error`` like the other request packets
            self._error = "Failed to create request path for tag"

    def set_bit(self, bit: int, value: bool, request_id: int):
        """Stage one bit change (applied when the message is sent)."""
        if self.data_type == "DWORD":
            # DWORD arrays address bits across elements; reduce to the bit
            # position within a single 32-bit element
            bit %= 32

        if value:
            self._or_mask |= 1 << bit
            self._and_mask |= 1 << bit
        else:
            self._or_mask &= ~(1 << bit)
            self._and_mask &= ~(1 << bit)

        self.bits.append(bit)
        self._request_ids.append(request_id)

    def _setup_message(self):
        super()._setup_message()
        self._msg += [
            self.tag_service,
            self.request_path,
            UINT.encode(self._mask_size),
            # both masks are truncated to the tag's actual byte size
            ULINT.encode(self._or_mask)[: self._mask_size],
            # BUGFIX: the AND mask was sliced by ``self._and_mask`` (the
            # huge mask *value*) instead of ``self._mask_size``, so all 8
            # encoded bytes were always sent -- a malformed message for
            # any type smaller than a LINT.
            ULINT.encode(self._and_mask)[: self._mask_size],
        ]
class MultiServiceResponsePacket(SendUnitDataResponsePacket):
    """Response for a multi-service request: one sub-response per sub-request.

    The reply payload carries a count, a table of 16-bit offsets, and the
    packed per-service replies; each slice is handed to the matching
    sub-request's response class.
    """
    __log = logging.getLogger(f"{__module__}.{__qualname__}")

    def __init__(self, request: "MultiServiceRequestPacket", raw_data: bytes = None):
        self.request = request
        self.values = None
        self.request_statuses = None
        self.responses = []  # filled by _parse_reply, in sub-request order
        super().__init__(request, raw_data)

    def _parse_reply(self):
        super()._parse_reply()
        # reply layout: UINT count, then `count` UINT offsets, then data
        num_replies = UINT.decode(self.data)
        offset_data = self.data[2 : 2 + 2 * num_replies]
        offsets = (UINT.decode(offset_data[i : i + 2]) for i in range(0, len(offset_data), 2))
        start, end = tee(offsets)  # split offsets into start/end indexes
        next(end)  # advance end by 1 so 2nd item is the end index for the first item
        # zip_longest makes the last slice run to the end of the data
        reply_data = [self.data[i:j] for i, j in zip_longest(start, end)]
        padding = bytes(46)  # pad the front of the packet so it matches the size of
        # a read tag response, probably not the best idea but it works for now
        for data, request in zip(reply_data, self.request.requests):
            response = request.response_class(request, padding + data)
            self.responses.append(response)

    def __repr__(self):
        return f"{self.__class__.__name__}(values={_r(self.values)}, error={self.error!r})"
class MultiServiceRequestPacket(SendUnitDataRequestPacket):
    """Bundle several tag-service requests into one CIP multi-service request.

    The message router receives a count, a table of 16-bit offsets into
    the payload, and then the concatenated per-tag messages.
    """
    __log = logging.getLogger(f"{__module__}.{__qualname__}")
    type_ = "multi"
    response_class = MultiServiceResponsePacket

    def __init__(self, sequence: cycle, requests: Sequence[TagServiceRequestPacket]):
        super().__init__(sequence)
        self.requests = requests
        self.request_path = request_path(ClassCode.message_router, 1)

    def _setup_message(self):
        super()._setup_message()
        self._msg += [Services.multiple_service_request, self.request_path]

    def build_message(self):
        super().build_message()
        count = len(self.requests)
        self._msg.append(UINT.encode(count))
        # per-tag payloads, serialized without CIP framing
        payloads = [req.tag_only_message() for req in self.requests]
        # offsets are measured from the start of the service data:
        # 2 bytes for the count plus 2 bytes per offset entry
        cursor = 2 + 2 * count
        encoded_offsets = []
        for payload in payloads:
            encoded_offsets.append(UINT.encode(cursor))
            cursor += len(payload)
        return b"".join(self._msg + encoded_offsets + payloads)
| mit | eda154f535169fb930a6ac75f2ca9327 | 33.282511 | 113 | 0.598365 | 3.869906 | false | false | false | false |
kronenthaler/mod-pbxproj | tests/pbxsections/TestXCBuildConfiguration.py | 1 | 5185 | import unittest
from pbxproj.pbxsections.XCBuildConfiguration import XCBuildConfiguration
class XCBuildConfigurationTest(unittest.TestCase):
    """Unit tests for XCBuildConfiguration flag and search-path editing.

    Covers add_flags/remove_flags on empty, single-value and list-value
    settings, and add_search_paths/remove_search_paths with the
    recursive/escape options and the $(inherited) special case.
    """
    def testAddFlagOnNewObject(self):
        obj = XCBuildConfiguration()
        obj.add_flags('flag', '-flag')
        self.assertIsNotNone(obj.buildSettings)
        self.assertIsNotNone(obj.buildSettings.flag)
        self.assertEqual(obj.buildSettings.flag, '-flag')

    def testAddFlagsOnNewObject(self):
        obj = XCBuildConfiguration()
        obj.add_flags('flag', ['-flag', '-another-flag'])
        self.assertIsNotNone(obj.buildSettings)
        self.assertIsNotNone(obj.buildSettings.flag)
        self.assertListEqual(obj.buildSettings.flag, ['-flag', '-another-flag'])

    def testAddFlagOnSingleFlag(self):
        # adding to a scalar setting promotes it to a list
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': '-flag'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_flags('flag', '-another-flag')
        self.assertListEqual(dobj.buildSettings.flag, ['-flag', '-another-flag'])

    def testAddFlagOnMultipleFlags(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': ['-flag', '-b-flag']}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_flags('flag', '-another-flag')
        self.assertListEqual(dobj.buildSettings.flag, ['-flag', '-b-flag', '-another-flag'])

    def testAddFlagsOnSingleFlag(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': '-flag'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_flags('flag', ['-another-flag'])
        self.assertListEqual(dobj.buildSettings.flag, ['-flag', '-another-flag'])

    def testAddFlagsOnMultipleFlags(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': ['-flag', '-b-flag']}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_flags('flag', ['-another-flag'])
        self.assertListEqual(dobj.buildSettings.flag, ['-flag', '-b-flag', '-another-flag'])

    def testRemoveFlagOnEmpty(self):
        # removing from an object with no buildSettings is a no-op
        obj = {'isa': 'XCBuildConfiguration'}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_flags('flag', '-flag')
        self.assertIsNone(dobj['buildSettings'])

    def testRemoveFlagNonExistent(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag1': '-flag'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_flags('flag', '-flag')
        self.assertIsNone(dobj.buildSettings['flag'])

    def testRemoveFlagOnSingleValue(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': '-flag'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_flags('flag', '-flag')
        self.assertIsNone(dobj.buildSettings['flag'])

    def testRemoveFlagAllValues(self):
        # passing None removes the whole setting
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': '-flag'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_flags('flag', None)
        self.assertIsNone(dobj.buildSettings['flag'])

    def testRemoveFlagOnMultipleValue(self):
        # a one-element remainder collapses back to a scalar
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'flag': ['-flag', '-b-flag']}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_flags('flag', '-flag')
        self.assertEqual(dobj.buildSettings.flag, '-b-flag')

    def testAddSearchPathRecursiveUnEscaped(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_search_paths('search', '$(SRC_ROOT)', recursive=True)
        self.assertEqual(dobj.buildSettings.search, '$(SRC_ROOT)/**')

    def testAddSearchPathNonRecursiveUnEscaped(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_search_paths('search', '$(SRC_ROOT)', recursive=False)
        self.assertEqual(dobj.buildSettings.search, '$(SRC_ROOT)')

    def testAddSearchPathRecursiveEscaped(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_search_paths('search', '$(SRC_ROOT)', recursive=True, escape=True)
        self.assertEqual(dobj.buildSettings.search, '"$(SRC_ROOT)"/**')

    def testAddSearchPathNonRecursiveEscaped(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_search_paths('search', '$(SRC_ROOT)', recursive=False, escape=True)
        self.assertEqual(dobj.buildSettings.search, '"$(SRC_ROOT)"')

    def testAddSearchPathInherit(self):
        # $(inherited) is passed through untouched (no /** suffix)
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.add_search_paths('search', '$(inherited)')
        self.assertEqual(dobj.buildSettings.search, '$(inherited)')

    def testRemoveSearchPath(self):
        obj = {'isa': 'XCBuildConfiguration', 'buildSettings': {'search': '$(inherited)'}}
        dobj = XCBuildConfiguration().parse(obj)
        dobj.remove_search_paths('search', '$(inherited)')
        self.assertIsNone(dobj.buildSettings['search'])
| mit | 8e3314b1a7edaf0bcc38033455921687 | 38.280303 | 94 | 0.647252 | 3.880988 | false | true | false | false |
oemof/feedinlib | tests/test_examples.py | 1 | 2121 | import os
import subprocess
import tempfile
import nbformat
import pytest
class TestExamples:
    """Smoke-test the example notebooks by executing them via nbconvert.

    All tests are currently skipped (see the skip reasons), but the
    harness is kept so they can be re-enabled.
    """
    def _notebook_run(self, path):
        """
        Execute a notebook via nbconvert and collect output.
        Returns (parsed nb object, execution errors)
        """
        dirname, __ = os.path.split(path)
        # run from the notebook's directory so relative paths resolve;
        # NOTE(review): this changes the process cwd as a side effect
        os.chdir(dirname)
        with tempfile.NamedTemporaryFile(suffix=".ipynb") as fout:
            args = ["jupyter", "nbconvert", "--to", "notebook", "--execute",
                    "--ExecutePreprocessor.timeout=200",
                    "--output", fout.name, path]
            subprocess.check_call(args)
            # read back the executed notebook from the temp file
            fout.seek(0)
            nb = nbformat.read(fout, nbformat.current_nbformat)
        # collect every error output from every executed cell
        errors = [output for cell in nb.cells if "outputs" in cell
                  for output in cell["outputs"]
                  if output.output_type == "error"]
        return nb, errors

    @pytest.mark.skip(reason="Should examples be part of the package"
                             "in the first place?.")
    def test_load_era5_ipynb(self):
        parent_dirname = os.path.dirname(os.path.dirname(__file__))
        nb, errors = self._notebook_run(
            os.path.join(parent_dirname, 'example',
                         'load_era5_weather_data.ipynb'))
        assert errors == []

    @pytest.mark.skip(reason="Requires open_FRED,"
                             "which depends on oemof <0.4.")
    def test_pvlib_ipynb(self):
        parent_dirname = os.path.dirname(os.path.dirname(__file__))
        nb, errors = self._notebook_run(
            os.path.join(parent_dirname, 'example',
                         'run_pvlib_model.ipynb'))
        assert errors == []

    @pytest.mark.skip(reason="Requires open_FRED,"
                             "which depends on oemof <0.4.")
    def test_windpowerlib_turbine_ipynb(self):
        parent_dirname = os.path.dirname(os.path.dirname(__file__))
        nb, errors = self._notebook_run(
            os.path.join(parent_dirname, 'example',
                         'run_windpowerlib_turbine_model.ipynb'))
        assert errors == []
| mit | d4b67e7d313638451906c0a87fbed172 | 35.568966 | 76 | 0.558227 | 3.986842 | false | true | false | false |
openpathsampling/openpathsampling | openpathsampling/experimental/storage/test_mdtraj_json.py | 3 | 2273 | from .mdtraj_json import *
import pytest
import numpy as np
import numpy.testing as npt
from ..simstore.custom_json import bytes_codec, numpy_codec, custom_json_factory
from ..simstore.test_custom_json import CustomJSONCodingTest
from openpathsampling.tests.test_helpers import data_filename
class MDTrajCodingTest(CustomJSONCodingTest):
    """Base test for MDTraj JSON codecs.

    Subclasses set ``self.codec``, ``self.objs`` and ``self.dcts`` in
    their ``setup``; this base class checks encoding (``test_default``)
    and a full encode/decode round trip.
    """
    def setup(self):
        if not HAS_MDTRAJ:
            pytest.skip()
        self.filename = data_filename('ala_small_traj.pdb')

    def test_default(self):
        # custom for handling numpy: array-valued attributes need
        # npt.assert_array_equal instead of plain ==
        for (obj, dct) in zip(self.objs, self.dcts):
            default = self.codec.default(obj)
            numpy_attrs = [attr for attr, val in dct.items()
                           if isinstance(val, np.ndarray)]
            other_attrs = [attr for attr, val in dct.items()
                           if not isinstance(val, np.ndarray)]
            for attr in numpy_attrs:
                npt.assert_array_equal(default[attr], dct[attr])
            for attr in other_attrs:
                assert default[attr] == dct[attr]

    def test_round_trip(self):
        # numpy/bytes codecs are needed because MDTraj objects contain
        # ndarray payloads
        codecs = [numpy_codec, bytes_codec] + mdtraj_codecs
        encoder, decoder = custom_json_factory(codecs)
        self._test_round_trip(encoder, decoder)
class TestTopologyCoding(MDTrajCodingTest):
    """Round-trip tests for the mdtraj.Topology JSON codec."""
    def setup(self):
        super(TestTopologyCoding, self).setup()
        self.codec = top_codec
        top = md.load(self.filename).topology
        # a Topology serializes as its atoms dataframe plus bond array
        dataframe, bonds = top.to_dataframe()
        self.objs = [top]
        self.dcts = [{
            '__class__': 'Topology',
            '__module__': 'mdtraj.core.topology',
            'atoms': dataframe.to_json(),
            'bonds': bonds
        }]
class TestTrajectoryCoding(MDTrajCodingTest):
    """Round-trip tests for the mdtraj.Trajectory JSON codec."""
    def setup(self):
        super(TestTrajectoryCoding, self).setup()
        self.codec = traj_codec
        traj = md.load(self.filename)
        self.objs = [traj]
        # a Trajectory serializes as coordinates, topology, times and
        # (possibly None) unit-cell information
        self.dcts = [{
            '__class__': 'Trajectory',
            '__module__': 'mdtraj.core.trajectory',
            'xyz': traj.xyz,
            'topology': traj.topology,
            'time': traj.time,
            'unitcell_lengths': traj.unitcell_lengths,
            'unitcell_angles': traj.unitcell_angles
        }]
| mit | f4806c62637496c5af1f91af305ef435 | 31.471429 | 80 | 0.587769 | 3.820168 | false | true | false | false |
openpathsampling/openpathsampling | openpathsampling/analysis/tis/crossing_probability.py | 3 | 5290 | import collections
import openpathsampling as paths
from openpathsampling.netcdfplus import StorableNamedObject
from openpathsampling.numerics import LookupFunction
import pandas as pd
import numpy as np
from .core import EnsembleHistogrammer, MultiEnsembleSamplingAnalyzer
class FullHistogramMaxLambdas(EnsembleHistogrammer):
    """Histogram the maximum lambda value reached in each ensemble.

    One instance is created per transition; the per-ensemble max-lambda
    histograms are one route to the total crossing probability (TCP).

    Parameters
    ----------
    transition: :class:`.TISTransition`
        the transition to be analyzed
    hist_parameters: dict
        histogram parameters for this collective variable; allowed keys
        are 'bin_width' (a float) and 'bin_range' (a tuple
        ``(left_edge, right_edge)``; only the left edge is used)
    max_lambda_func: callable
        function mapping trajectories to the histogrammed value. The
        default (``None``) uses the maximum of the order parameter
        attached to the interface set. Override this if (a) the
        interface set has no associated order parameter, or (b) you
        want values along a different order parameter.
    """
    def __init__(self, transition, hist_parameters, max_lambda_func=None):
        self.transition = transition
        if max_lambda_func is None:
            try:
                max_lambda_func = transition.interfaces.cv_max
            except AttributeError:
                pass  # no cv_max on this interface set; checked below
        if max_lambda_func is None:
            raise RuntimeError("Can't identify function to determine max "
                               + "value of order parameter.")
        # TODO: is this used?
        self.lambdas = dict(zip(transition.ensembles,
                                transition.interfaces.lambdas))
        super(FullHistogramMaxLambdas, self).__init__(
            ensembles=transition.ensembles,
            f=max_lambda_func,
            hist_parameters=hist_parameters
        )
#class PerEnsembleMaxLambdas(EnsembleHistogrammer):
# TODO: this just maps the count to the ensemble, not the full histogram
#def __init__(self, transition):
#interfaces_lambdas = transition.interfaces.lambdas
class TotalCrossingProbability(MultiEnsembleSamplingAnalyzer):
    """
    Calculate the total crossing probability function.

    The total crossing probability function is generated by calculating the
    individual ensemble crossing probability functions (using, e.g.,
    :class:`.FullHistogramMaxLambdas`), and combining them using some
    combining method (default is :class:`.WHAM`). One of these objects is
    instantiated per transition.

    Parameters
    ----------
    max_lambda_calc: :class:`.EnsembleHistogrammer`
        usually :class:`.FullHistogramMaxLambdas`; object that creates the
        max lambda histograms for the ensembles associated with this
        transition.
    combiner: TODO
        class that combines multiple histograms (with restricted sampling)
        into a single result. If `None` (default), uses :class:`.WHAM`
    """
    def __init__(self, max_lambda_calc, combiner=None):
        transition = max_lambda_calc.transition
        super(TotalCrossingProbability, self).__init__(transition.ensembles)
        self.max_lambda_calc = max_lambda_calc
        self.transition = transition
        if combiner is None:
            # default combiner: WHAM over this transition's interfaces
            lambdas = self.transition.interfaces.lambdas
            combiner = paths.numerics.WHAM(interfaces=lambdas)
        self.combiner = combiner

    def from_weighted_trajectories(self, input_dict):
        """Calculate results from a weighted trajectories dictionary.

        Parameters
        ----------
        input_dict : dict of {:class:`.Ensemble`: collections.Counter}
            ensemble as key, and a counter mapping each trajectory
            associated with that ensemble to its counter of time spent in
            the ensemble (output of ``steps_to_weighted_trajectories``)

        Returns
        -------
        :class:`.LookupFunction`
            the total crossing probability function
        """
        hists = self.max_lambda_calc.from_weighted_trajectories(input_dict)
        return self.from_ensemble_histograms(hists)

    def from_ensemble_histograms(self, hists):
        """Calculate results from a dict of ensemble histograms.

        Parameters
        ----------
        hists : dict of {:class:`.Ensemble`: :class:`.numerics.Histogram`}
            histogram for each ensemble (from ``self.max_lambda_calc``)

        Returns
        -------
        :class:`.LookupFunction`
            the total crossing probability function
        """
        tcp_results = {}  # NOTE(review): unused; kept for compatibility
        input_hists = [hists[ens] for ens in self.transition.ensembles]
        # build a reverse-cumulative dataframe, one column per ensemble
        df = paths.numerics.histograms_to_pandas_dataframe(
            input_hists,
            fcn="reverse_cumulative"
        ).sort_index(axis=1)
        # TODO: remove WHAM-specific name here
        tcp = self.combiner.wham_bam_histogram(df).to_dict()
        return LookupFunction(tcp.keys(), tcp.values())
| mit | 749bd30c090483114bda8075827ca86b | 39.692308 | 78 | 0.655198 | 4.437919 | false | false | false | false |
openpathsampling/openpathsampling | openpathsampling/storage/util.py | 4 | 2682 | import openpathsampling as paths
def split_md_storage(filename):
    """
    Split storage into two files: trajectory frames and everything else.

    Currently this makes only sense for storage with snapshots that use
    additional stores. Otherwise we need to store the full snapshots for
    CVs anyway and nothing is gained.

    Writes ``<base>_main.nc`` (all simulation objects; trajectories only
    mentioned) and ``<base>_frames.nc`` (the trajectory frame data).

    Parameters
    ----------
    filename : str
        path of the storage file to split
    """
    st_from = paths.AnalysisStorage(
        filename=filename
    )

    filename_base = '.'.join(filename.split('.')[:-1])
    filename_main = filename_base + '_main.nc'
    filename_data = filename_base + '_frames.nc'

    # `use_uuid=True`, otherwise we cannot later recombine the two!
    st_main = paths.Storage(filename=filename_main, mode='w')
    st_traj = paths.Storage(filename=filename_data, mode='w')

    st_main.snapshots.save(st_from.snapshots[0])
    st_traj.snapshots.save(st_from.snapshots[0])

    # this will tell the data storage not to save snapshots, only a reference
    st_main.snapshots.only_mention = True

    # BUG FIX: the original used bare `map(...)` calls for these saves;
    # `map` is a lazy iterator in Python 3, so nothing was ever copied.
    # Explicit loops actually perform the work.
    for traj in st_from.trajectories:
        st_traj.trajectories.save(traj)

    # evaluate every CV over all snapshots so the cached values are filled
    # before the CVs are saved
    q = st_from.snapshots.all()
    for cv in st_from.cvs:
        cv(q)

    for cv in st_from.cvs:
        st_main.cvs.save(cv)
    for traj in st_from.trajectories:
        st_main.trajectories.mention(traj)

    for storage_name in [
        'steps', 'pathmovers', 'topologies', 'networks', 'details',
        'shootingpointselectors', 'engines', 'volumes', 'samples',
        'samplesets', 'ensembles', 'transitions', 'movechanges',
        'pathsimulators', 'cvs', 'interfacesets', 'msouters'
    ]:
        save = getattr(st_main, storage_name).save
        for obj in getattr(st_from, storage_name):
            save(obj)

    st_main.close()
    st_traj.close()
    st_from.close()
def join_md_storage(filename_main, filename_data=None):
    """Recombine the files produced by :func:`split_md_storage`.

    Parameters
    ----------
    filename_main : str
        path of the main file; assumed to end in ``main.nc``
    filename_data : str or None
        path of the frames file; if ``None`` it is derived from
        ``filename_main`` by replacing the ``main.nc`` suffix with
        ``frames.nc``

    Writes the combined storage to ``*joined.nc``.
    """
    # NOTE(review): the slicing assumes `filename_main` ends with the
    # 7-character suffix 'main.nc' -- confirm against callers
    if filename_data is None:
        filename_data = filename_main[:-7] + 'frames.nc'

    filename_to = filename_main[:-7] + 'joined.nc'

    st_main = paths.Storage(
        filename=filename_main,
        mode='r'
    )
    st_traj = paths.Storage(
        filename=filename_data,
        mode='r'
    )
    st_to = paths.Storage(
        filename_to,
        mode='w'
    )

    # BUG FIX: the original used bare `map(...)` calls here; `map` is a
    # lazy iterator in Python 3, so nothing was ever copied. Explicit
    # loops actually perform the saves.
    for traj in st_traj.trajectories:
        st_to.trajectories.save(traj)

    for storage_name in [
        'steps',
        'pathmovers', 'topologies', 'networks', 'details', 'trajectories',
        'shootingpointselectors', 'engines', 'volumes',
        'samplesets', 'ensembles', 'transitions', 'movechanges',
        'samples', 'pathsimulators', 'cvs', 'interfacesets', 'msouters'
    ]:
        save = getattr(st_to, storage_name).save
        for obj in getattr(st_main, storage_name):
            save(obj)

    st_traj.close()
    st_main.close()
    st_to.close()
| mit | 3ce4fab4b1d7e5c5e9b23f175b18c957 | 27.231579 | 76 | 0.623788 | 3.547619 | false | false | false | false |
openpathsampling/openpathsampling | openpathsampling/movechange.py | 3 | 18289 | __author__ = 'Jan-Hendrik Prinz'
import logging
import openpathsampling as paths
from openpathsampling.netcdfplus import StorableObject, lazy_loading_attributes
from openpathsampling.netcdfplus import DelayedLoader
from .treelogic import TreeMixin
logger = logging.getLogger(__name__)
# @lazy_loading_attributes('details')
class MoveChange(TreeMixin, StorableObject):
    '''
    A class that describes the concrete realization of a PathMove.

    Attributes
    ----------
    mover : PathMover
        The mover that generated this MoveChange
    samples : list of Sample
        A list of newly generated samples by this particular move.
        Only used by node movers like RepEx or Shooters
    subchanges : list of MoveChanges
        the MoveChanges created by submovers
    details : Details
        an object that contains MoveType specific attributes and information.
        E.g. for a RandomChoiceMover which Mover was selected.
    '''
    # lazily loaded from storage on first access
    details = DelayedLoader()

    def __init__(self, subchanges=None, samples=None, mover=None,
                 details=None, input_samples=None):
        StorableObject.__init__(self)

        self._lazy = {}

        # caches for the lazily computed properties below
        self._len = None
        self._collapsed = None
        self._results = None
        self._trials = None
        self._accepted = None
        self.mover = mover
        if subchanges is None:
            self.subchanges = []
        else:
            self.subchanges = subchanges

        if samples is None:
            self.samples = []
        else:
            self.samples = samples
        if input_samples is None:
            self.input_samples = []
        else:
            self.input_samples = input_samples
        self.details = details

    def __getattr__(self, item):
        # try to get attributes from details dict; only called for
        # attributes not found the normal way
        try:
            return getattr(self.details, item)
        except AttributeError as e:
            # append context to the original error message and re-raise
            msg = "{0} not found in change's details".format(str(item))
            if not e.args:
                e.args = [msg]
            else:
                e.args = tuple([e.args[0] + "; " + msg] + list(e.args[1:]))
            raise

    def to_dict(self):
        """Serialize this change for storage."""
        return {
            'mover': self.mover,
            'details': self.details,
            'samples': self.samples,
            'input_samples': self.input_samples,
            'subchanges': self.subchanges,
            'cls': self.__class__.__name__
        }

    # hook for TreeMixin
    @property
    def _subnodes(self):
        return self.subchanges

    @property
    def submovers(self):
        # movers of the direct subchanges
        return [ch.mover for ch in self.subchanges]

    @property
    def subchange(self):
        """
        Return the single/only sub-movechange if there is only one.

        Returns
        -------
        MoveChange
            the only subchange, or ``None`` if there are zero or several
        """
        if len(self.subchanges) == 1:
            return self.subchanges[0]
        else:
            # TODO: might raise exception
            return None

    @staticmethod
    def _default_match(original, test):
        # matching rule used by the tree logic: a change matches another
        # change by identity, a mover instance by identity, and a mover
        # class by exact class
        if isinstance(test, paths.MoveChange):
            return original is test
        elif isinstance(test, paths.PathMover):
            return original.mover is test
        elif issubclass(test, paths.PathMover):
            return original.mover.__class__ is test
        else:
            return False

    def movetree(self):
        """
        Return a tree with the movers of each node

        Notes
        -----
        This is equivalent to
        `tree.map_tree(lambda x : x.mover)`
        """
        return self.map_tree(lambda x: x.mover)

    @property
    def identifier(self):
        # TreeMixin hook: a change is identified by its mover
        return self.mover

    @property
    def collapsed_samples(self):
        """
        Return a collapsed set of samples with non used samples removed

        This is the minimum required set of samples to keep the `MoveChange`
        correct and allow to target sampleset to be correctly created.
        These are the samples used by `.closed`

        Examples
        --------
        Assume that you run 3 shooting moves for replica #1. Then only the
        last of the three really matters for the target sample_set since #1
        will be replaced by #2 which will be replaced by #3. So this function
        will return only the last sample.
        """
        if self._collapsed is None:
            s = paths.SampleSet([]).apply_samples(self.results)

            # keep order just for being thorough
            self._collapsed = [
                samp for samp in self.results
                if samp in s
            ]

        return self._collapsed

    @property
    def accepted(self):
        """
        Returns if this particular move was accepted.

        Mainly used for rejected samples.

        Notes
        -----
        Acceptance is determined from the number of resulting samples. If at
        least one sample is returned then this move will change the sampleset
        and is considered an accepted change.
        """
        if self._accepted is None:
            self._accepted = len(self.results) > 0

        return self._accepted

    def __add__(self, other):
        """
        This allows to use `+` to create SequentialPMCs

        Notes
        -----
        You can also use this to apply several changes
        >>> new_sset = old_sset + change1 + change2
        >>> new_sset = old_sset + (change1 + change2)
        """
        if isinstance(other, MoveChange):
            return SequentialMoveChange([self, other])
        else:
            raise ValueError('Only MoveChanges can be combined')

    @property
    def results(self):
        """
        Returns a list of all samples that are accepted in this move

        This contains unnecessary, but accepted samples, too.

        Returns
        -------
        list of Samples
            the list of samples that should be applied to the SampleSet
        """
        if self._results is None:
            self._results = self._get_results()

        return self._results

    def _get_results(self):
        """
        Determines all relevant accepted samples for this move

        Includes all accepted samples also from subchanges

        Returns
        -------
        list of Sample
            the list of accepted samples for this move
        """
        return []

    @property
    def trials(self):
        """
        Returns a list of all samples generated during the PathMove.

        This includes all accepted and rejected samples (which does NOT
        include hidden samples yet)
        """
        if self._trials is None:
            self._trials = self._get_trials()

        return self._trials

    def _get_trials(self):
        """
        Determines all samples for this move

        Includes all samples also from subchanges

        Returns
        -------
        list of Sample
            the list of all samples generated for this move

        Notes
        -----
        This function needs to be implemented for custom changes
        """
        return []

    def __str__(self):
        if self.accepted:
            return 'SampleMove : %s : %s : %d samples' % (self.mover.cls, self.accepted, len(self.trials)) + ' ' + str(self.trials) + ''
        else:
            return 'SampleMove : %s : %s :[]' % (self.mover.cls, self.accepted)

    @property
    def canonical(self):
        """
        Return the first non single-subchange

        Notes
        -----
        Usually a mover that returns a single subchange is for deciding what to
        do rather than describing what is actually happening. This property
        returns the first mover that is not one of these delegating movers and
        contains information of what has been done in this move.

        What you are usually interested in is `.canonical.mover` to get the
        relevant mover.

        Examples
        --------
        >>> a = OnewayShootingMover()
        >>> change = a.move(sset)
        >>> change.canonical.mover # returns either Forward or Backward
        """
        pmc = self
        while pmc.subchange is not None:
            if pmc.mover.is_canonical is True:
                return pmc
            pmc = pmc.subchange

        return pmc

    @property
    def description(self):
        """
        Return a compact representation of the change
        """
        subs = self.subchanges
        if len(subs) == 0:
            return str(self.mover)
        elif len(subs) == 1:
            return subs[0].description
        else:
            return ':'.join([sub.description for sub in subs])
class EmptyMoveChange(MoveChange):
    """
    A MoveChange standing in for "nothing happened".

    It produces no trials and no results, so applying it leaves any
    SampleSet untouched.
    """
    def __init__(self, mover=None, details=None):
        super(EmptyMoveChange, self).__init__(mover=mover, details=details)

    def __str__(self):
        return ''

    def _get_results(self):
        return []

    def _get_trials(self):
        return []
class SampleMoveChange(MoveChange):
    """
    A MoveChange built directly from a list of trial samples.

    This is the elementary change type: every other MoveChange
    ultimately contains `SampleMoveChange` instances at its leaves.
    """
    def __init__(self, samples, mover=None, details=None, input_samples=None):
        """
        Parameters
        ----------
        samples : :class:`.Sample` or list of :class:`.Sample`
            the trial samples used in this change; a bare sample is
            normalized to a one-element list
        mover : :class:`.PathMover`
            the generating PathMover
        details : :class:`.Details`
            a details object containing specifics about the change
        input_samples : list of :class:`.Sample`
            the samples the mover started from
        """
        super(SampleMoveChange, self).__init__(
            mover=mover,
            details=details,
            input_samples=input_samples
        )
        # wrap a single bare Sample in a list so self.samples is uniform
        self.samples = [samples] if samples.__class__ is paths.Sample else samples

    def _get_results(self):
        # a plain SampleMoveChange accepts nothing by itself
        return []

    def _get_trials(self):
        return self.samples
class AcceptedSampleMoveChange(SampleMoveChange):
    """
    A SampleMoveChange whose trial samples were accepted.

    Because the move was accepted, the trial samples are also the
    resulting samples.
    """
    def _get_results(self):
        return self.samples

    def _get_trials(self):
        return self.samples
class RejectedSampleMoveChange(SampleMoveChange):
    """
    Represents a rejected SamplePMC.

    This will return no samples as its result (inherited from
    SampleMoveChange), hence it is rejected.
    """
class RejectedNaNSampleMoveChange(RejectedSampleMoveChange):
    """
    Represents a rejected SamplePMC because of the occurrence of NaN.

    This will return no samples as its result, hence it is rejected.
    """
    pass
class RejectedMaxLengthSampleMoveChange(RejectedSampleMoveChange):
    """
    Represents a rejected SamplePMC because of hitting the max length limit.

    This will return no samples as its result, hence it is rejected.
    """
    pass
class SequentialMoveChange(MoveChange):
    """
    A MoveChange that chains several subchanges executed one after another.

    It owns no samples of its own; trials and results are gathered from
    the underlying subchanges.
    """
    def __init__(self, subchanges, mover=None, details=None):
        """
        Parameters
        ----------
        subchanges : list of :class:`.MoveChange`
            the MoveChanges to be applied in sequence
        mover : :class:`.PathMover`
            the generating mover
        details : :class:`.Details`
            mover-specific information
        """
        super(SequentialMoveChange, self).__init__(mover=mover, details=details)
        self.subchanges = subchanges

    def _get_results(self):
        gathered = []
        for sub in self.subchanges:
            gathered.extend(sub.results)
        return gathered

    def _get_trials(self):
        return [sample for sub in self.subchanges for sample in sub.trials]

    def __str__(self):
        header = 'SequentialMove : %s : %d samples\n' % (
            self.accepted, len(self.results))
        body = MoveChange._indent('\n'.join(str(sub) for sub in self.subchanges))
        return header + body
class PartialAcceptanceSequentialMoveChange(SequentialMoveChange):
    """
    A sequential change that keeps results up to the first rejection.

    Results from subchanges after the first rejected one are discarded;
    everything before it is kept.
    """
    def _get_results(self):
        kept = []
        for sub in self.subchanges:
            if not sub.accepted:
                break
            kept.extend(sub.results)
        return kept

    def __str__(self):
        header = 'PartialAcceptanceMove : %s : %d samples\n' % (
            self.accepted, len(self.results))
        body = MoveChange._indent('\n'.join(str(sub) for sub in self.subchanges))
        return header + body
class ConditionalSequentialMoveChange(SequentialMoveChange):
    """
    An all-or-nothing sequential change.

    If any subchange is rejected, the whole sequence yields no results.
    """
    def _get_results(self):
        gathered = []
        for sub in self.subchanges:
            if not sub.accepted:
                return []
            gathered.extend(sub.results)
        return gathered

    def __str__(self):
        header = 'ConditionalSequentialMove : %s : %d samples\n' % (
            self.accepted, len(self.results))
        body = MoveChange._indent('\n'.join(str(sub) for sub in self.subchanges))
        return header + body
class NonCanonicalConditionalSequentialMoveChange(
        ConditionalSequentialMoveChange):
    """ Special move change for reactive flux and S-shooting simulation.

    This move change inherits from :class:`.ConditionalSequentialMoveChange`
    and returns the outcome of the last subchange.
    """
    @property
    def canonical(self):
        # the last subchange holds the physically relevant outcome
        return self.subchanges[-1]
class SubMoveChange(MoveChange):
    """
    A helper MoveChange that represents the application of a submover.

    The raw implementation delegates everything to the single subchange.
    """
    def __init__(self, subchange, mover=None, details=None):
        """
        Parameters
        ----------
        subchange : MoveChange
            the actual subchange used by this wrapper PMC
        mover : PathMover
            the generating mover
        details : Details
            mover-specific information
        """
        super(SubMoveChange, self).__init__(mover=mover, details=details)
        self.subchanges = [subchange]

    def _get_results(self):
        return self.subchange.results

    def _get_trials(self):
        return self.subchange.trials

    def __str__(self):
        # Defaults to use the name of the used mover; [:-5] strips the
        # trailing "Mover" from the class name
        return self.mover.__class__.__name__[:-5] + ' :\n' + MoveChange._indent(str(self.subchange))
class RandomChoiceMoveChange(SubMoveChange):
    """
    A MoveChange that represents the application of a mover chosen randomly.
    """
    # This class is empty since all of the decision is specified by the mover
    # and it requires no additional logic to decide if it is accepted.
class FilterByEnsembleMoveChange(SubMoveChange):
    """
    A MoveChange that hides every sample whose ensemble is not allowed.

    Both results and trials are restricted to samples belonging to
    ``self.mover.ensembles``.
    """
    # TODO: Question: filter out also trials not in the ensembles? I think so,
    # because we are only interested in trials that could be relevant, right?

    def _get_results(self):
        allowed = self.mover.ensembles
        return [
            sample for sample in self.subchange.results
            if sample.ensemble in allowed
        ]

    def _get_trials(self):
        allowed = self.mover.ensembles
        return [
            sample for sample in self.subchange.trials
            if sample.ensemble in allowed
        ]

    def __str__(self):
        return 'FilterMove : allow only ensembles [%s] from sub moves : %s : %d samples\n' % \
               (str(self.mover.ensembles), self.accepted, len(self.results)) + \
               MoveChange._indent(str(self.subchange))
class FilterSamplesMoveChange(SubMoveChange):
    """
    A MoveChange that keeps only a selection of the underlying samples.

    The selection is given by ``self.mover.selected_samples``; negative
    indices address samples from the end (e.g. -1 is the last sample).
    """
    def _get_results(self):
        sample_set = self.subchange.results
        # BUG FIX: this previously returned the normalized *indices*
        # (plain ints) instead of the selected samples themselves;
        # downstream code (e.g. __str__, SampleSet.apply_samples)
        # expects Sample objects here. Negative indices are still
        # supported via the modulo.
        return [sample_set[idx % len(sample_set)]
                for idx in self.mover.selected_samples]

    def __str__(self):
        return 'FilterMove : pick samples [%s] from sub moves : %s : %d samples\n' % \
               (str(self.mover.selected_samples), self.accepted, len(self.results)) + \
               MoveChange._indent(str(self.subchange))
class KeepLastSampleMoveChange(SubMoveChange):
    """
    A MoveChange that only keeps the last generated sample.

    This is different from using `.reduced` which will only change the
    level of detail that is stored. This MoveChange will actually remove
    potential relevant samples and thus affect the outcome of the new
    SampleSet. To really remove samples also from storage you can use
    this MoveChange in combination with `.closed` or `.reduced`

    Notes
    -----
    Does the same as `FilterSamplesMoveChange(subchange, [-1], False)`
    I think we should try to not use this. It would be better to make submoves
    and finally filter by relevant ensembles. Much like running a function
    with local variables/local ensembles.
    """
    def _get_results(self):
        samples = self.subchange.results
        # keep only the final sample when more than one was produced
        if len(samples) > 1:
            samples = [samples[-1]]

        return samples

    def __str__(self):
        return 'Restrict to last sample : %s : %d samples\n' % \
               (self.accepted, len(self.results)) + \
               MoveChange._indent(str(self.subchange))
class PathSimulatorMoveChange(SubMoveChange):
    """
    A MoveChange that just wraps a subchange and references a PathSimulator
    """
    def __str__(self):
        # Summary of the simulator step, followed by the indented subchange.
        return 'PathSimulatorStep : %s : Step # %d with %d samples\n' % \
            (str(self.mover.pathsimulator.cls), self.details.step, len(self.results)) + \
            MoveChange._indent(str(self.subchange))
| mit | a1c71557317e00418c9e0d8c4a7100de | 27.576563 | 136 | 0.597299 | 4.45747 | false | false | false | false |
openpathsampling/openpathsampling | openpathsampling/experimental/simstore/storage.py | 3 | 28000 | """
A simple storage interface for simulation objects and data objects.
Reserved words
--------------
Table names:
* ``uuid``
* ``tables``
* ``metadata``
Column names:
* ``uuid``
* ``idx``
"""
import logging
import collections
from collections import abc
import itertools
from . import tools
from .serialization_helpers import get_uuid, get_all_uuids
from .serialization_helpers import get_all_uuids_loading
from .serialization_helpers import get_reload_order
# from .serialization import Serialization
from .proxy import ProxyObjectFactory, GenericLazyLoader
from .storable_functions import StorageFunctionHandler, StorableFunction
from .tags_table import TagsTable
from .type_ident import STANDARD_TYPING
try:
basestring
except NameError:
basestring = str
# module-level logger named after this module
logger = logging.getLogger(__name__)

# these tables are required in *all* schema: object->location lookup,
# table registry, storable-function result types, and user tags
universal_schema = {
    'uuid': [('uuid', 'uuid'), ('table', 'int'), ('row', 'int')],
    'tables': [('name', 'str'), ('idx', 'int'), ('module', 'str'),
               ('class_name', 'str')],
    'sfr_result_types': [('uuid', 'str'), ('result_type', 'str')],
    'tags': [('name', 'str'), ('content', 'uuid')],
}
from openpathsampling.netcdfplus import StorableNamedObject
class GeneralStorage(StorableNamedObject):
    """Storage for simulation objects and data objects.

    Wraps a backend and a serialization schema, providing save/load with
    caching, lazy proxies, and storable-function result handling.
    """
    # registry of all open storages, keyed by backend identifier
    _known_storages = {}

    def __init__(self, backend, class_info, schema=None,
                 simulation_classes=None, fallbacks=None, safemode=False):
        super().__init__()
        GeneralStorage._known_storages[backend.identifier] = self
        self.backend = backend
        # NOTE(review): schema.copy() raises AttributeError when schema is
        # None, even though the None case is handled below -- confirm intent
        self.schema = schema.copy()
        self.class_info = class_info.copy()
        self._safemode = None
        self.safemode = safemode
        self._sf_handler = StorageFunctionHandler(storage=self)
        self.type_identification = STANDARD_TYPING  # TODO: copy
        # TODO: implement fallback
        self.fallbacks = tools.none_to_default(fallbacks, [])
        self.simulation_classes = tools.none_to_default(simulation_classes,
                                                        {})
        # self._pseudo_tables = {table_name: dict()
        #                        for table_name in self.simulation_classes}
        self._simulation_objects = {}
        # per-class views over simulation objects (see _update_pseudo_tables)
        self._pseudo_tables = {table_name: PseudoTable()
                               for table_name in self.simulation_classes}
        self._pseudo_tables['misc_simulation'] = PseudoTable()
        self._storage_tables = {}  # stores .steps, .snapshots
        # self.serialization = Serialization(self)
        self.proxy_factory = ProxyObjectFactory(self, self.class_info)
        self.cache = MixedCache()
        if self.schema is None:
            self.schema = backend.schema
        self.cache = MixedCache({})  # initial empty cache so it exists
        self.initialize_with_mode(self.mode)
        self.tags = TagsTable(self)
        # preload simulation objects; they form the immutable fixed cache
        self._simulation_objects = self._cache_simulation_objects()
        self.cache = MixedCache(self._simulation_objects)
        self._stashed = []
        self._reset_fixed_cache()
    @property
    def mode(self):
        """str : file mode of the backend (e.g. 'r', 'w', or 'a')."""
        return self.backend.mode
@property
def safemode(self):
return self._safemode
@safemode.setter
def safemode(self, value):
if value is self._safemode:
return
self.class_info.set_safemode(value)
    def initialize_with_mode(self, mode):
        """Set up schema, tables, and preloaded objects for the file mode.

        Parameters
        ----------
        mode : str
            'r' (read), 'a' (append), or 'w' (write)
        """
        if mode == 'r' or mode == 'a':
            self.register_schema(self.schema, class_info_list=[],
                                 read_mode=True)
            # adopt tables present in the file that our schema doesn't know
            missing = {k: v for k, v in self.backend.schema.items()
                       if k not in self.schema and k not in universal_schema}
            self.schema.update(missing)
            table_to_class = self.backend.table_to_class
            self._load_missing_info_tables(table_to_class)
            # NOTE(review): .simulation_objects / .storable_functions appear
            # to resolve through __getattr__ table lookup -- confirm
            sim_objs = {get_uuid(obj): obj
                        for obj in self.simulation_objects}
            sim_objs.update({get_uuid(obj): obj
                             for obj in self.storable_functions})
            self._simulation_objects.update(sim_objs)
            self._update_pseudo_tables(sim_objs)
        elif mode == 'w':
            self.register_schema(self.schema, class_info_list=[])
    def _load_missing_info_tables(self, table_to_class):
        """Register class info for schema tables that lack it.

        Parameters
        ----------
        table_to_class : dict
            mapping of backend table name to the stored class

        Raises
        ------
        RuntimeError
            if some tables still cannot be registered afterwards
        """
        missing_info_tables = [tbl for tbl in self.schema
                               if tbl not in self.class_info.tables]
        n_missing = len(missing_info_tables)
        logger.info("Missing info from %d dynamically-registered tables",
                    n_missing)
        classes = [table_to_class[tbl] for tbl in missing_info_tables]
        self.register_from_tables(missing_info_tables, classes)
        # recompute to see what the registration actually resolved
        missing_info_tables = [tbl for tbl in self.schema
                               if tbl not in self.class_info.tables]
        logger.info("Successfully registered %d missing tables",
                    n_missing - len(missing_info_tables))
        if missing_info_tables:
            raise RuntimeError("Unable to register existing database "
                               + "tables: " + str(missing_info_tables))
    def register_from_tables(self, table_names, classes):
        """Hook to register class info for existing backend tables.

        Subclasses override this to handle special lookups; the base
        implementation is a no-op.
        """
        # override in subclass to handle special lookups
        pass
    def stash(self, objects):
        """Hold object(s) aside; a single object is wrapped in a list."""
        objects = tools.listify(objects)
        self._stashed.extend(objects)
    def close(self):
        """Close the backend, the function handler, and all fallbacks."""
        # TODO: should sync on close
        self.backend.close()
        self._sf_handler.close()
        for fallback in self.fallbacks:
            fallback.close()
    def register_schema(self, schema, class_info_list,
                        backend_metadata=None, read_mode=False):
        """Register a schema and its class info with storage and backend.

        Parameters
        ----------
        schema : dict
            mapping of table name to list of (attribute, type_str) pairs
        class_info_list : list
            class info objects to register for these tables
        backend_metadata : dict
            extra metadata forwarded to the backend
        read_mode : bool
            if True, skip backend table creation when tables already exist
        """
        # check validity
        self.class_info.register_info(class_info_list, schema)
        # for info in class_info_list:
        #     info.set_defaults(schema)
        #     self.class_info.add_class_info(info)
        # register every attribute type used anywhere in the schema
        schema_types = [type_str for attr_list in schema.values()
                        for _, type_str in attr_list]
        for type_str in schema_types:
            backend_type = self.class_info.backend_type(type_str)
            self.backend.register_type(type_str, backend_type)
        if not read_mode or self.backend.table_to_class == {}:
            table_to_class = {table: self.class_info[table].cls
                              for table in schema
                              if table not in ['uuid', 'tables']}
            # here's where we add the class_info to the backend
            self.backend.register_schema(schema, table_to_class,
                                         backend_metadata)
        self.schema.update(schema)
        # expose each table as a sequence view (e.g. storage.steps)
        for table in self.schema:
            self._storage_tables[table] = StorageTable(self, table)
        # self.serialization.register_serialization(schema, self.class_info)
    def register_from_instance(self, lookup, obj):
        """Hook to register class info from an instance (unsupported here)."""
        raise NotImplementedError("No way to register from an instance")
    def register_missing_tables_for_objects(self, uuid_obj_dict):
        """Register a table for each distinct lookup key among the objects."""
        # missing items are handled by the special_lookup; register each
        # lookup key only once, using the first object seen as the example
        lookup_examples = set([])
        for obj in uuid_obj_dict.values():
            lookup = self.class_info.lookup_key(obj)
            if lookup not in lookup_examples:
                self.register_from_instance(lookup, obj)
                lookup_examples |= {lookup}
    def filter_existing_uuids(self, uuid_dict):
        """Drop entries already stored in the backend (mutates in place).

        Parameters
        ----------
        uuid_dict : dict
            mapping of UUID to object

        Returns
        -------
        dict
            the same dict with already-stored, non-special UUIDs removed
        """
        existing = self.backend.load_uuids_table(
            uuids=list(uuid_dict.keys()),
            ignore_missing=True
        )
        # "special" here indicates that we always try to re-save these, even
        # if they've already been saved once. This is (currently) necessary
        # for objects that contain mutable components (such as the way
        # storable functions contain their results)
        # TODO: make `special` customizable
        special = set(self._sf_handler.canonical_functions.keys())
        for uuid_row in existing:
            uuid = uuid_row.uuid
            if uuid not in special:
                del uuid_dict[uuid_row.uuid]
        return uuid_dict
    def _uuids_by_table(self, input_uuids, cache, get_table_name):
        """Collect all reachable, unsaved UUIDs, grouped by target table.

        Parameters
        ----------
        input_uuids : dict
            mapping of UUID to object for the roots to save
        cache : Mapping
            known UUIDs; these are not descended into again
        get_table_name : callable
            ``(uuid, obj) -> str`` giving the table for an object

        Returns
        -------
        collections.defaultdict
            mapping of table name to ``{uuid: obj}``
        """
        # find all UUIDs we need to save with this object
        logger.debug("Listing all objects to save")
        uuids = {}
        for uuid, obj in input_uuids.items():
            uuids.update(get_all_uuids(obj, known_uuids=cache,
                                       class_info=self.class_info))
        logger.debug("Found %d objects" % len(uuids))
        logger.debug("Deproxying proxy objects")
        uuids = self._unproxy_lazies(uuids)
        logger.debug("Checking if objects already exist in database")
        uuids = self.filter_existing_uuids(uuids)
        # group by table, then save appropriately
        # by_table; convert a dict of {uuid: obj} to {table: {uuid: obj}}
        by_table = collections.defaultdict(dict)
        by_table.update(tools.dict_group_by(uuids,
                                            key_extract=get_table_name))
        return by_table
    def _unproxy_lazies(self, uuid_mapping):
        """Replace lazy proxies in the mapping by fully loaded objects."""
        lazies = [uuid for uuid, obj in uuid_mapping.items()
                  if isinstance(obj, GenericLazyLoader)]
        logger.debug("Found " + str(len(lazies)) + " objects to deproxy")
        # allow_lazy=False forces full deserialization of the proxies
        loaded = self.load(lazies, allow_lazy=False)
        uuid_mapping.update({get_uuid(obj): obj for obj in loaded})
        return uuid_mapping
    def save(self, obj_list, use_cache=True):
        """Save object(s) and all their reachable dependencies.

        Parameters
        ----------
        obj_list : object or list of object
            the object(s) to store
        use_cache : bool
            if True, UUIDs already in the cache are treated as known and
            are not descended into again
        """
        if type(obj_list) is not list:
            obj_list = [obj_list]
        cache = self.cache if use_cache else {}
        # TODO: convert the whole .save process to something based on the
        # class_info.serialize method (enabling per-class approaches for
        # finding UUIDs, which will be a massive serialization speed-up
        # self.class_info.serialize(obj, storage=self)
        # check if obj is in DB (maybe this can be removed?)
        logger.debug("Starting save")
        input_uuids = {get_uuid(obj): obj for obj in obj_list}
        input_uuids = self.filter_existing_uuids(input_uuids)
        if not input_uuids:
            return  # exit early if everything is already in storage
        # check default table for things to register; register them
        # TODO: move to function: self.register_missing(by_table)
        # TODO: convert to while?
        get_table_name = lambda uuid, obj_: self.class_info[obj_].table
        by_table = self._uuids_by_table(input_uuids, cache, get_table_name)
        old_missing = {}
        while '__missing__' in by_table:
            # __missing__ is a special result returned by the
            # ClassInfoContainer if this is object is expected to have a
            # table, but the table doesn't exist (e.g., for dynamically
            # added tables)
            missing = by_table.pop('__missing__')
            if missing == old_missing:
                # no progress since the last iteration: abort, don't loop
                raise RuntimeError("Unable to register: " + str(missing))
            missing = self._unproxy_lazies(missing)
            logger.info("Registering tables for %d missing objects",
                        len(missing))
            self.register_missing_tables_for_objects(missing)
            missing_by_table = tools.dict_group_by(missing, get_table_name)
            logger.info("Registered %d new tables: %s",
                        len(missing_by_table),
                        str(list(missing_by_table.keys())))
            by_table.update(missing_by_table)
            # search for objects inside the objects we just registered
            next_by_table = self._uuids_by_table(missing, cache,
                                                 get_table_name)
            for table, uuid_dict in next_by_table.items():
                by_table[table].update(uuid_dict)
            old_missing = missing
        # TODO: move to function self.store_sfr_results(by_table)
        self.save_function_results()  # always for canonical
        has_sfr = (self.class_info.sfr_info is not None
                   and self.class_info.sfr_info.table in by_table)
        if has_sfr:
            # storable function results are saved via their parent function,
            # not serialized like ordinary objects
            func_results = by_table.pop(self.class_info.sfr_info.table)
            logger.info("Saving results from %d storable functions",
                        len(func_results))
            for result in func_results.values():
                func = result.parent
                self.save_function_results(func)
        # TODO: add simulation objects to the cache
        # this is the actual serialization
        logger.debug("Filling %d tables: %s", len(by_table),
                     str(list(by_table.keys())))
        for table in by_table:
            logger.debug("Storing %d objects to table %s",
                         len(by_table[table]), table)
            serialize = self.class_info[table].serializer
            storables_list = [serialize(o) for o in by_table[table].values()]
            self.backend.add_to_table(table, storables_list)
            # special handling for simulation objects
            if table == 'simulation_objects':
                self._update_pseudo_tables(by_table[table])
                self._simulation_objects.update(by_table[table])
                self._reset_fixed_cache()
        logger.debug("Storing complete")
    def save_function_results(self, funcs=None):
        """Persist cached storable-function results to the backend.

        Parameters
        ----------
        funcs : StorableFunction or list of StorableFunction
            functions whose results to store; default (None) stores the
            results of all canonical functions
        """
        # TODO: move this to sf_handler; where the equivalent load happens
        # no equivalent load because user has no need -- any loading can be
        # done by func, either as func(obj) or func.preload_cache()
        if funcs is None:
            funcs = list(self._sf_handler.canonical_functions.values())
        funcs = tools.listify(funcs)
        for func in funcs:
            # TODO: XXX This is where we need to use type identification
            # 1. check if the associated function is registered already
            # 2. if not, extract type from the first function value
            self._sf_handler.update_cache(func.local_cache)
            result_dict = func.local_cache.result_dict
            # each function gets its own table, named by its UUID
            table_name = get_uuid(func)
            if not self.backend.has_table(table_name):
                # use an example result (if any) to infer the result type
                if result_dict:
                    example = next(iter(result_dict.values()))
                else:
                    example = None
                self._sf_handler.register_function(func,
                                                   example_result=example)
            if result_dict:
                self.backend.add_storable_function_results(
                    table_name=table_name,
                    result_dict=result_dict
                )
        self._reset_fixed_cache()
    def load(self, input_uuids, allow_lazy=True, force=False):
        """
        Load objects by UUID.

        Parameters
        ----------
        input_uuids : List[str]
            UUIDs of the objects to load
        allow_lazy : bool
            whether to allow lazy proxy objects
        force : bool
            force reloading this object even if it is already cached (used
            for deproxying a lazy proxy object)

        Returns
        -------
        list
            the loaded objects, in the order of ``input_uuids``
        """
        # loading happens in 4 parts:
        # 1. Get UUIDs that need to be loaded
        # 2. Make lazy-loading proxy objects
        # 3. Identify the order in which we deserialize
        # 4. Deserialize
        # set force=True to make it reload this full object (used for
        # loading a lazy-loaded object)
        if isinstance(input_uuids, basestring):
            # TEMP: remove; for now, prevents my stupidity
            raise RuntimeError("David, you forgot to wrap UUID in list")
        logger.debug("Starting to load %d objects", len(input_uuids))
        if force:
            self.cache.delete_items(input_uuids)
        # anything already cached is reused; the rest must be loaded
        results = {uuid: self.cache[uuid] for uuid in input_uuids
                   if uuid in self.cache}
        uuid_list = [uuid for uuid in input_uuids if uuid not in self.cache]
        logger.debug("Getting internal structure of %d non-cached objects",
                     len(uuid_list))
        to_load, lazy_uuids, dependencies, uuid_to_table = \
            get_all_uuids_loading(uuid_list=uuid_list,
                                  backend=self.backend,
                                  schema=self.schema,
                                  existing_uuids=self.cache,
                                  allow_lazy=allow_lazy)
        logger.debug("Loading %d objects; creating %d lazy proxies",
                     len(to_load), len(lazy_uuids))
        # to_load : List (table rows from backend)
        # lazy : Set[str] (lazy obj UUIDs)
        # dependencies : Dict[str, List[str]] (map UUID to contained UUIDs)
        # uuid_to_table : Dict[str, str] (UUID to table name)
        # make lazies
        logger.debug("Identifying classes for %d lazy proxies",
                     len(lazy_uuids))
        lazy_uuid_rows = self.backend.load_uuids_table(lazy_uuids)
        lazies = tools.group_by_function(lazy_uuid_rows,
                                         self.backend.uuid_row_to_table_name)
        new_uuids = self.proxy_factory.make_all_lazies(lazies)
        # get order and deserialize
        uuid_to_table_row = {r.uuid: r for r in to_load}
        ordered_uuids = get_reload_order(to_load, dependencies)
        new_uuids = self.deserialize_uuids(ordered_uuids, uuid_to_table,
                                           uuid_to_table_row, new_uuids)
        self.cache.update(new_uuids)
        results.update(new_uuids)
        new_results = [results[uuid] for uuid in input_uuids]
        # handle special case of storable functions
        for result in new_results:
            if isinstance(result, StorableFunction):
                self._sf_handler.register_function(result)
        return new_results
    def deserialize_uuids(self, ordered_uuids, uuid_to_table,
                          uuid_to_table_row, new_uuids=None):
        """Deserialize table rows into objects, in dependency order.

        Parameters
        ----------
        ordered_uuids : list of str
            UUIDs ordered so that dependencies come before dependents
        uuid_to_table : dict
            mapping of UUID to table name
        uuid_to_table_row : dict
            mapping of UUID to its backend table row
        new_uuids : dict
            previously deserialized objects (e.g. lazy proxies)

        Returns
        -------
        dict
            mapping of UUID to deserialized object
        """
        # TODO: remove this, replace with SerializationSchema
        logger.debug("Reconstructing from %d objects", len(ordered_uuids))
        new_uuids = tools.none_to_default(new_uuids, {})
        for uuid in ordered_uuids:
            if uuid not in self.cache and uuid not in new_uuids:
                # is_in = [k for (k, v) in dependencies.items() if v==uuid]
                table = uuid_to_table[uuid]
                table_row = uuid_to_table_row[uuid]
                table_dict = {attr: getattr(table_row, attr)
                              for (attr, type_name) in self.schema[table]}
                deserialize = self.class_info[table].deserializer
                obj = deserialize(uuid, table_dict, [new_uuids, self.cache])
                new_uuids[uuid] = obj
        return new_uuids
    def sync(self):
        """Flush pending data to disk (no-op hook for subclasses)."""
        pass

    def sync_all(self):
        """Flush everything to disk (no-op hook for subclasses)."""
        pass
    def _cache_simulation_objects(self):
        """Load all simulation objects and storable functions from the file.

        Returns
        -------
        dict
            mapping of UUID to loaded object
        """
        # load up all the simulation objects
        try:
            backend_iter = itertools.chain(
                self.backend.table_iterator('simulation_objects'),
                self.backend.table_iterator('storable_functions')
            )
            sim_obj_uuids = [row.uuid for row in backend_iter]
        except KeyError:
            # TODO: this should probably be a custom error; don't rely on
            # the error type this backend raises
            # this happens if no simulation objects are given in the
            # schema... there's not technically required
            objs = []
        else:
            objs = self.load(sim_obj_uuids)
        return {get_uuid(obj): obj for obj in objs}
    def _reset_fixed_cache(self):
        # rebuild the immutable part of the mixed cache from the simulation
        # objects and the canonical storable functions
        self.cache.fixed_cache = {}
        self.cache.fixed_cache.update(self._simulation_objects)
        self.cache.fixed_cache.update(self._sf_handler.canonical_functions)
    def _update_pseudo_tables(self, simulation_objects):
        """Sort simulation objects into their per-class pseudo tables.

        Objects matching none of the registered simulation classes go to
        the 'misc_simulation' pseudo table; objects matching several
        classes are appended to each matching pseudo table.
        """
        # TODO: replace the pseudo_tables code here with a class
        for uuid, obj in simulation_objects.items():
            my_cls = None
            for (key, cls) in self.simulation_classes.items():
                if isinstance(obj, cls):
                    self._pseudo_tables[key].append(obj)
                    my_cls = cls
                    # self._pseudo_tables[key][uuid] = obj
                    # if obj.is_named:
                    #     self._pseudo_tables[key][obj.name] = obj
            if my_cls is None:
                self._pseudo_tables['misc_simulation'].append(obj)
    def summary(self, detailed=False):
        """Return a string summary of this storage file.

        Parameters
        ----------
        detailed : bool
            whether to return the detailed description, where the simulation
            objects are divided into their various pseudotables

        Returns
        -------
        str
            human-readable description of the tables and their sizes
        """
        out_str = "File: " + self.backend.filename + "\n"
        # TODO: add size to the first line
        out_str += "Includes tables:\n"
        storage_tables = dict(self._storage_tables)  # make a copy
        if detailed:
            # replace the single simulation_objects entry with the
            # per-class pseudo tables
            storage_tables.pop('simulation_objects')
            storage_tables.update(self._pseudo_tables)
        for (name, table) in storage_tables.items():
            out_str += "* " + name + ": " + str(len(table)) + " items\n"
        return out_str
    def __getattr__(self, attr):
        # override getattr to create iterators over the tables (stores);
        # only invoked when normal attribute lookup fails
        # NOTE(review): accessing any attribute before _storage_tables is
        # assigned in __init__ would recurse here -- confirm not reachable
        if attr in self._storage_tables:
            return self._storage_tables[attr]
        elif attr in self._pseudo_tables:
            return self._pseudo_tables[attr]
        else:
            raise AttributeError("'{}' object has no attribute '{}'"
                                 .format(self.__class__.__name__, attr))
class MixedCache(abc.MutableMapping):
    """Combine a frozen cache and a mutable cache.

    Lookups consult the (read-only) ``fixed_cache`` first, then fall back
    to the mutable ``cache``; writes and deletions only touch ``cache``.
    """
    # TODO: benchmark with single dict instead; might be just as fast!
    def __init__(self, fixed_cache=None):
        self.fixed_cache = tools.none_to_default(fixed_cache, default={})
        self.cache = {}

    def clear(self):
        # drop only the mutable part; the fixed cache stays intact
        self.cache = {}

    def delete_items(self, list_of_items, error_if_missing=False):
        """Delete every listed key that is present in this cache."""
        for key in list_of_items:
            if key in self:
                del self[key]
            elif error_if_missing:
                raise KeyError()  # TODO: message and check error type

    def reproxy(self, schema):
        # TODO: idea: turn loaded objects back into None for any proxy
        # objects in cache. This frees those things up for garbage
        # collection.
        pass

    def __getitem__(self, key):
        try:
            return self.fixed_cache[key]
        except KeyError:
            return self.cache[key]

    def __setitem__(self, key, value):
        self.cache[key] = value

    def __delitem__(self, key):
        try:
            del self.cache[key]
        except KeyError as err:
            if key in self.fixed_cache:
                raise TypeError("Can't delete from fixed cache")
            raise err

    def __len__(self):
        return len(self.fixed_cache) + len(self.cache)

    def __iter__(self):
        return itertools.chain(self.fixed_cache, self.cache)
class StorageTable(abc.Sequence):
    """Sequence view over one backend table, loading objects on access."""
    # NOTE: currently you still need to be able to hold the whole table in
    # memory ... at least, with the SQL backend.
    def __init__(self, storage, table):
        # storage : the GeneralStorage that owns this view
        # table : str, name of the backend table
        self.storage = storage
        self.table = table
        # clear the storage cache every this-many blocks while iterating
        self.clear_cache_block_freq = 100
        self.iter_block_size = 100  # TODO: base it on the size of an object

    def __iter__(self):
        # TODO: ensure that this gives us things in idx order
        backend_iter = self.storage.backend.table_iterator(self.table)
        enum_iter = enumerate(tools.grouper(backend_iter,
                                            self.iter_block_size))
        for block_num, block in enum_iter:
            row_uuids = [row.uuid for row in block]
            loaded = self.storage.load(row_uuids)
            # periodically drop the cache so long iterations don't grow
            # memory without bound
            if block_num % self.clear_cache_block_freq == 0 and block_num != 0:
                self.storage.cache.clear()
            for obj in loaded:
                yield obj

    def __getitem__(self, item):
        """Load the object(s) at an integer index or slice."""
        len_self = len(self)
        if type(item) is slice:  # Slice is not an acceptable base class
            unpack_return = False
            items = (i for i in range(*item.indices(len_self)))
        elif type(item) is int:
            unpack_return = True
            if not (-len_self <= item < len_self):
                raise IndexError("table index out of range")
            if item < 0:
                item += len(self)
            items = [item]
        else:
            raise TypeError("Only access via slice or int allowed, "
                            f"got type {type(item).__name__}.")
        uuids = [self.storage.backend.table_get_item(self.table, item).uuid
                 for item in items]
        if unpack_return:
            # single integer index: return the bare object
            return self.storage.load(uuids)[0]
        else:
            # slice: return a list of objects
            return self.storage.load(uuids)

    def __len__(self):
        return self.storage.backend.table_len(self.table)

    def cache_all(self):
        """Load the whole table into the storage cache in one pass."""
        old_blocksize = self.iter_block_size
        self.iter_block_size = len(self)
        _ = list(iter(self))
        self.iter_block_size = old_blocksize

    def save(self, obj):
        """Save an object via the owning storage."""
        # this is to match with the netcdfplus API
        self.storage.save(obj)
# TODO: subclass for MCSteps with additional method .ordered, returning
# things in the order of the mccycle number -- also, manage special
# caching
class PseudoTable(abc.MutableSequence):
    # TODO: use this in the main code
    # NOTE: This will require that the storage class append to it
    """List of objects that can be retrieved by index or name.

    PseudoTables are used to group simulation objects together. Objects
    are indexed positionally; named objects can additionally be looked up
    by name via ``table['name']``.
    """
    def __init__(self, sequence=None):
        self._sequence = []          # positional storage
        self._uuid_to_obj = {}       # UUID -> object
        self._name_to_uuid = {}      # name -> UUID (named objects only)
        sequence = tools.none_to_default(sequence, [])
        for item in sequence:
            self.append(item)

    @staticmethod
    def _get_uuid_and_name(obj):
        """Return (uuid, name) for an object; name is None if unnamed."""
        uuid = get_uuid(obj)
        try:
            name = None if not obj.is_named else obj.name
        except AttributeError:
            # occurs if simulation object is not a StorableNamedObject
            # (relevant for a few very old classes; should be fixed in 2.0)
            name = None
        return uuid, name

    def get_by_uuid(self, uuid):
        """Return the stored object with the given UUID."""
        return self._uuid_to_obj[uuid]

    # NOTE: .index() can get confusing because you can have two equal
    # volumes (same CV, same range) with one named and the other not named.
    def __getitem__(self, item):
        try:
            ret_val = self._sequence[item]
        except TypeError as err:
            # fall back to lookup by name for string keys
            if type(item) != str:
                raise err
            uuid = self._name_to_uuid[item]
            ret_val = self.get_by_uuid(uuid)
        return ret_val

    def __setitem__(self, key, value):
        # TODO: should this be allowed? or make it not really mutable, only
        # appendable?
        del self[key]
        self.insert(key, value)

    def __delitem__(self, key):
        # TODO: should this be allowed? or make it not really mutable, only
        # appendable?
        item = self[key]
        uuid, name = self._get_uuid_and_name(item)
        del self._sequence[self._sequence.index(item)]
        del self._uuid_to_obj[uuid]
        # BUG FIX: unnamed objects never register a name, so deleting one
        # used to raise KeyError(None) here
        if name is not None:
            del self._name_to_uuid[name]

    def __len__(self):
        return len(self._uuid_to_obj)

    def __iter__(self):
        return iter(self._uuid_to_obj.values())

    def insert(self, where, item):
        # TODO: should this be allowed? or make it not really mutable, only
        # appendable?
        uuid, name = self._get_uuid_and_name(item)
        self._sequence.insert(where, item)
        self._uuid_to_obj[uuid] = item
        if name is not None:
            self._name_to_uuid[name] = uuid
| mit | 47b8a006111a0ce5962ba34915d23be3 | 39.114613 | 79 | 0.579464 | 4.17039 | false | false | false | false |
openpathsampling/openpathsampling | openpathsampling/netcdfplus/cache.py | 4 | 13834 | from collections import OrderedDict
import weakref
__author__ = 'Jan-Hendrik Prinz'
class Cache(object):
    """
    A cache like dict

    Subclasses report occupancy and capacity as ``(strong, weak)`` pairs
    via `count` and `size`.
    """
    @property
    def count(self):
        """
        int : the number of strong references
        int : the number of weak references
        """
        return len(self), 0

    @property
    def size(self):
        """
        int : the maximal number of strong references, -1 if infinite
        int : the maximal number of weak references, -1 if infinite
        """
        return -1, -1

    def __str__(self):
        # render as "ClassName(strong/weak of max_strong/max_weak)"
        size = self.count
        maximum = self.size
        return '%s(%d/%d of %s/%s)' % (
            self.__class__.__name__,
            size[0], size[1],
            'Inf' if maximum[0] < 0 else str(maximum[0]),
            'Inf' if maximum[1] < 0 else str(maximum[1])
        )

    def __getitem__(self, item):
        # the base class holds nothing, so every lookup misses
        raise KeyError("No items")

    def __setitem__(self, key, value):
        pass

    def get(self, item, default=None):
        """
        get value by key if it exists, None else

        Parameters
        ----------
        item : `object`
            key to select element in cache
        default : `object`
            return value if item is not present in cache

        Returns
        -------
        `object` or `None`
            cached value at key item if present, returns default otherwise
        """
        try:
            return self[item]
        except KeyError:
            return default

    def transfer(self, old_cache):
        """
        Transfer values between caches

        Useful if during run-time a cache is replaced by another instance

        Parameters
        ----------
        old_cache : the cache from which this cache is to be filled

        Returns
        -------
        `Cache`
            this cache, filled with the transferred values
        """
        size = self.size
        if size[0] == -1 or size[1] == -1:
            # unlimited capacity: copy everything, oldest first, so recency
            # order is preserved in LRU-style targets
            for key in reversed(list(old_cache)):
                try:
                    self[key] = old_cache[key]
                except KeyError:
                    pass
        else:
            # limited capacity: copy only as many entries as can fit
            for key in reversed(list(old_cache)[0:size[0] + size[1]]):
                try:
                    self[key] = old_cache[key]
                except KeyError:
                    pass
        return self

    # alias: in the base class get() has no reordering side effects anyway
    get_silent = get
class NoCache(Cache):
    """A virtual cache that contains no elements.

    Writes are discarded, every lookup misses, and `transfer` copies
    nothing.
    """

    def __init__(self):
        super(NoCache, self).__init__()

    def __getitem__(self, item):
        # nothing is ever stored, so every lookup misses
        raise KeyError('No Cache has no items')

    def __contains__(self, item):
        return False

    def __setitem__(self, key, value):
        # silently discard the value
        pass

    def __iter__(self):
        return iter([])

    @property
    def count(self):
        """Always empty: zero strong and zero weak references."""
        return 0, 0

    @property
    def size(self):
        """Zero capacity for both strong and weak references."""
        return 0, 0

    def items(self):
        return []

    def transfer(self, old_cache):
        # nothing can be transferred into an always-empty cache
        return self

    def clear(self):
        pass
class MaxCache(dict, Cache):
    """
    A dictionary, can hold infinite strong references
    """
    def __init__(self):
        super(MaxCache, self).__init__()
        Cache.__init__(self)

    @property
    def count(self):
        # every stored reference is strong; no weak references are kept
        return len(self), 0

    @property
    def size(self):
        # unlimited strong references (-1 means infinite), no weak ones
        return -1, 0
class LRUCache(Cache):
    """
    Implements a simple Least Recently Used Cache

    Very simple using collections.OrderedDict. The size can be changed
    during run-time.
    """
    def __init__(self, size_limit):
        super(LRUCache, self).__init__()
        self._size_limit = size_limit
        self._cache = OrderedDict()

    @property
    def count(self):
        return len(self._cache), 0

    @property
    def size(self):
        return self.size_limit, 0

    @property
    def size_limit(self):
        return self._size_limit

    @size_limit.setter
    def size_limit(self, new_size):
        # BUG FIX: apply the new limit *before* enforcing it; previously
        # the check ran against the old limit, so shrinking never evicted
        self._size_limit = new_size
        self._check_size_limit()

    def __iter__(self):
        return iter(self._cache)

    def __reversed__(self):
        return reversed(self._cache)

    def __getitem__(self, item):
        # re-insert so the accessed item becomes most recently used
        obj = self._cache.pop(item)
        self._cache[item] = obj
        return obj

    def __setitem__(self, key, value, **kwargs):
        # BUG FIX: re-insert so that overwriting an existing key also
        # marks it as recently used, consistent with WeakLRUCache
        self._cache.pop(key, None)
        self._cache[key] = value
        self._check_size_limit()

    def _check_size_limit(self):
        # evict least recently used entries until within the limit
        while len(self._cache) > self.size_limit:
            self._cache.popitem(last=False)

    def __contains__(self, item):
        return item in self._cache

    def clear(self):
        self._cache.clear()

    def __len__(self):
        return len(self._cache)
class WeakLRUCache(Cache):
    """
    Implements a cache that keeps weak references to all elements

    In addition it uses a simple Least Recently Used Cache to make sure a
    portion of the last used elements are still present. Usually this
    number is 100.
    """

    def __init__(self, size_limit=100, weak_type='value'):
        """
        Parameters
        ----------
        size_limit : int
            integer that defines the size of the LRU cache. Default is 100.
        weak_type : str
            'value' keeps weak references to the values, 'key' keeps weak
            references to the keys
        """
        super(WeakLRUCache, self).__init__()
        self._size_limit = size_limit
        self.weak_type = weak_type
        if weak_type == 'value':
            self._weak_cache = weakref.WeakValueDictionary()
        elif weak_type == 'key':
            self._weak_cache = weakref.WeakKeyDictionary()
        else:
            raise ValueError("weak_type must be either 'key' or 'value'")
        self._cache = OrderedDict()

    @property
    def count(self):
        return len(self._cache), len(self._weak_cache)

    @property
    def size(self):
        return self._size_limit, -1

    def clear(self):
        self._cache.clear()
        self._weak_cache.clear()

    @property
    def size_limit(self):
        return self._size_limit

    @size_limit.setter
    def size_limit(self, new_size):
        # BUG FIX: apply the new limit *before* enforcing it; previously
        # the check ran against the old limit, so shrinking never demoted
        # entries to the weak cache until the next insertion
        self._size_limit = new_size
        self._check_size_limit()

    def __getitem__(self, item):
        try:
            # re-insert to mark the item as most recently used
            obj = self._cache.pop(item)
            self._cache[item] = obj
            return obj
        except KeyError:
            # promote a weakly-held entry back into the strong LRU cache
            obj = self._weak_cache[item]
            del self._weak_cache[item]
            self._cache[item] = obj
            self._check_size_limit()
            return obj

    def __setitem__(self, key, value, **kwargs):
        try:
            self._cache.pop(key)
        except KeyError:
            pass
        self._cache[key] = value
        self._check_size_limit()

    def get_silent(self, item):
        """
        Return item from the without reordering the LRU

        Parameters
        ----------
        item : object
            the item index to be retrieved from the cache

        Returns
        -------
        `object` or `None`
            the requested object if it exists else `None`
        """
        if item is None:
            return None
        try:
            return self._cache[item]
        except KeyError:
            try:
                return self._weak_cache[item]
            except KeyError:
                return None

    def _check_size_limit(self):
        # overflowing strong entries are demoted to the weak cache instead
        # of being discarded outright
        if self.size_limit is not None:
            while len(self._cache) > self.size_limit:
                self._weak_cache.__setitem__(*self._cache.popitem(last=False))

    def __contains__(self, item):
        return item in self._cache or item in self._weak_cache

    def keys(self):
        # BUG FIX: python 3 dict views do not support `+`; build lists
        return list(self._cache.keys()) + list(self._weak_cache.keys())

    def values(self):
        # BUG FIX: python 3 dict views do not support `+`; build lists
        return list(self._cache.values()) + list(self._weak_cache.values())

    def __len__(self):
        return len(self._cache) + len(self._weak_cache)

    def __iter__(self):
        for key in self._cache.keys():
            yield key
        for key in self._weak_cache.keys():
            yield key

    def __reversed__(self):
        for key in reversed(self._weak_cache):
            yield key
        for key in reversed(self._cache):
            yield key
class WeakValueCache(weakref.WeakValueDictionary, Cache):
    """
    Implements a cache that keeps weak references to all elements
    """
    def __init__(self, *args, **kwargs):
        weakref.WeakValueDictionary.__init__(self, *args, **kwargs)
        Cache.__init__(self)

    @property
    def count(self):
        # every stored reference is weak
        return 0, len(self)

    @property
    def size(self):
        # no strong references, unlimited weak references
        return 0, -1
class WeakKeyCache(weakref.WeakKeyDictionary, Cache):
    """
    Implements a cache that keeps weak references to all elements
    """
    @property
    def count(self):
        # every stored reference is weak
        return 0, len(self)

    @property
    def size(self):
        # no strong references, unlimited weak references
        return 0, -1
class LRUChunkLoadingCache(Cache):
    """
    Implements a cache that keeps references loaded in chunks

    Integer keys index into a backing ``variable`` (anything sliceable
    with a length, e.g. a netCDF variable). Values are held in fixed-size
    list chunks; the least recently used chunk is dropped once
    ``max_chunks`` is exceeded.
    """
    def __init__(self, chunksize=256, max_chunks=4*8192, variable=None):
        super(LRUChunkLoadingCache, self).__init__()
        self.max_chunks = max_chunks
        self.chunksize = chunksize
        self.variable = variable
        self._chunkdict = OrderedDict()
        self._firstchunk = 0
        self._lastchunk = []
        if variable is not None:
            self._size = len(self.variable)
        else:
            self._size = 0
        self._lastchunk_idx = self._size // self.chunksize

    @property
    def count(self):
        return sum(map(len, self._chunkdict.values())), 0

    @property
    def size(self):
        return self.max_chunks * self.chunksize, 0

    def clear(self):
        self._chunkdict.clear()
        self._firstchunk = 0
        self._lastchunk = []

    def update_size(self, size=None):
        """
        Update the knowledge of the size of the attached store

        Parameters
        ----------
        size : int
            the new size to be used. If `None` (default) the size is taken
            from the store directly using `len(store)`
        """
        if size is None:
            self._size = len(self.variable)
        else:
            self._size = size
        self._lastchunk_idx = (self._size - 1) // self.chunksize

    def load_chunk(self, chunk_idx):
        """
        Load a specific chunk

        Parameters
        ----------
        chunk_idx : int
            the integer index of the chunk to be loaded from the attached
            store. This might cause the last used chunk to be removed if the
            maximal number of allowed chunks is reached
        """
        if chunk_idx <= self._lastchunk_idx:
            if chunk_idx not in self._chunkdict:
                # chunk not cached, load full
                left = chunk_idx * self.chunksize
                right = min(self._size, left + self.chunksize)
                self._chunkdict[chunk_idx] = []
                self._chunkdict[chunk_idx].extend(self.variable[left:right])
                self._check_size_limit()
            elif len(self._chunkdict[chunk_idx]) < self.chunksize:
                # incomplete chunk, load rest
                chunk = self._chunkdict[chunk_idx]
                left = chunk_idx * self.chunksize + len(chunk)
                right = min(self._size, (chunk_idx + 1) * self.chunksize)
                if right > left:
                    chunk.extend(self.variable[left:right])

    def _update_chunk_order(self, chunk_idx):
        # move the chunk to the most-recently-used end of the OrderedDict
        chunk = self._chunkdict[chunk_idx]
        del self._chunkdict[chunk_idx]
        self._chunkdict[chunk_idx] = chunk
        self._firstchunk = chunk_idx

    def __getitem__(self, item):
        chunksize = self.chunksize
        chunk_idx = item // chunksize
        if chunk_idx in self._chunkdict:
            try:
                obj = self._chunkdict[chunk_idx][item % chunksize]
                if chunk_idx != self._firstchunk:
                    self._update_chunk_order(chunk_idx)
                return obj
            except IndexError:
                pass
        self.load_chunk(chunk_idx)
        try:
            return self._chunkdict[chunk_idx][item % chunksize]
        except IndexError:
            raise KeyError(item)

    def load_max(self):
        """
        Fill the cache with as many chunks as possible
        """
        self.update_size()
        # BUG FIX: `map` is lazy in python 3; the previous implementation
        # built the map object but never consumed it, so nothing loaded
        for chunk_idx in range(0, min(
                1 + (self._size - 1) // self.chunksize,
                self.max_chunks)):
            self.load_chunk(chunk_idx)

    def __setitem__(self, key, value, **kwargs):
        chunk_idx = key // self.chunksize
        if chunk_idx in self._chunkdict:
            chunk = self._chunkdict[chunk_idx]
        else:
            chunk = []
            self._chunkdict[chunk_idx] = chunk
        # fill any gap between the chunk's current end and `key` from the
        # backing variable before appending the new value
        left = chunk_idx * self.chunksize + len(chunk)
        right = key
        if right > left:
            chunk.extend(self.variable[left:right])
        chunk.append(value)
        if chunk_idx != self._firstchunk:
            self._update_chunk_order(chunk_idx)
        self._check_size_limit()
        if key >= self._size:
            self.update_size(key + 1)

    def _check_size_limit(self):
        if len(self._chunkdict) > self.max_chunks:
            self._chunkdict.popitem(last=False)

    def __contains__(self, item):
        # BUG FIX: previously iterated chunk *indices* and tested
        # `item in chunk` against ints (TypeError); instead report whether
        # the integer key currently sits inside a cached chunk
        chunk = self._chunkdict.get(item // self.chunksize)
        return chunk is not None and item % self.chunksize < len(chunk)

    def keys(self):
        # BUG FIX: chunks are lists and have no `.keys()`; return the
        # global integer indices of all cached values
        return [chunk_idx * self.chunksize + offset
                for chunk_idx, chunk in self._chunkdict.items()
                for offset in range(len(chunk))]

    def values(self):
        # BUG FIX: chunks are lists and have no `.values()`
        return [value
                for chunk in self._chunkdict.values()
                for value in chunk]

    def __len__(self):
        # BUG FIX: sum over the chunks (values), not the chunk indices
        return sum(map(len, self._chunkdict.values()))

    def __iter__(self):
        # BUG FIX: yield the global integer indices of cached values
        for chunk_idx, chunk in self._chunkdict.items():
            base = chunk_idx * self.chunksize
            for offset in range(len(chunk)):
                yield base + offset

    def __reversed__(self):
        # BUG FIX: dict_values is not reversible and chunks have no
        # `.keys()`; walk chunks and offsets in reverse order instead
        for chunk_idx in reversed(list(self._chunkdict)):
            chunk = self._chunkdict[chunk_idx]
            base = chunk_idx * self.chunksize
            for offset in reversed(range(len(chunk))):
                yield base + offset
| mit | c567e645013385a9b5a20531f65f898a | 24.336996 | 78 | 0.536143 | 4.229288 | false | false | false | false |
openpathsampling/openpathsampling | openpathsampling/tests/test_interface_set.py | 3 | 9129 | from __future__ import absolute_import
from builtins import zip
from builtins import range
from builtins import object
from nose.tools import (assert_equal, assert_not_equal, assert_almost_equal,
raises)
from nose.plugins.skip import Skip, SkipTest
from .test_helpers import (
true_func, assert_equal_array_array, make_1d_traj, data_filename,
assert_items_equal
)
import openpathsampling as paths
from openpathsampling.high_level.interface_set import GenericVolumeInterfaceSet
import logging
logging.getLogger('openpathsampling.initialization').setLevel(logging.CRITICAL)
logging.getLogger('openpathsampling.ensemble').setLevel(logging.CRITICAL)
logging.getLogger('openpathsampling.storage').setLevel(logging.CRITICAL)
logging.getLogger('openpathsampling.netcdfplus').setLevel(logging.CRITICAL)
class TestInterfaceSet(object):
def setup(self):
paths.InterfaceSet._reset()
self.cv = paths.FunctionCV(name="x", f=lambda s: s.xyz[0][0])
self.lambdas = [0.0, 0.1, 0.2, 0.3]
min_vals= [float("-inf")] * len(self.lambdas)
self.volumes = [paths.CVDefinedVolume(self.cv, min_v, max_v)
for min_v, max_v in zip(min_vals, self.lambdas)]
self.interface_set = paths.InterfaceSet(self.volumes, self.cv,
self.lambdas)
self.decreasing = paths.InterfaceSet(list(reversed(self.volumes)),
self.cv,
list(reversed(self.lambdas)))
self.no_lambda_set = paths.InterfaceSet(self.volumes, self.cv)
def test_direction(self):
assert_equal(self.interface_set.direction, 1)
assert_equal(self.no_lambda_set.direction, 0)
assert_equal(self.decreasing.direction, -1)
def test_get_lambda(self):
for (v, l) in zip(self.volumes, self.lambdas):
assert_equal(self.interface_set.get_lambda(v), l)
assert_equal(self.no_lambda_set.get_lambda(v), None)
def test_list_behavior(self):
# len
assert_equal(len(self.interface_set), 4)
assert_equal(len(self.no_lambda_set), 4)
# getitem, contains
for i in range(4):
assert_equal(self.volumes[i], self.interface_set[i])
assert_equal(self.volumes[i] in self.interface_set, True)
# getitem for slices
sliced = self.interface_set[0:2]
for vol in sliced:
assert_equal(sliced.get_lambda(vol),
self.interface_set.get_lambda(vol))
# special case of -1 needs to work (used frequently!)
assert_equal(self.volumes[-1], self.interface_set[-1])
# iter
for vol in self.interface_set:
assert_equal(vol in self.volumes, True)
# reversed
i = 0
for vol in reversed(self.interface_set):
assert_equal(vol, self.volumes[3-i])
i += 1
def test_no_direction_possible(self):
min_vals=[-0.1, -0.2, -0.3]
max_vals=[0.1, 0.2, 0.3]
volumes = [paths.CVDefinedVolume(self.cv, min_v, max_v)
for min_v, max_v in zip(min_vals, max_vals)]
ifaces = paths.InterfaceSet(volumes)
assert_equal(ifaces.cv, None)
assert_equal(ifaces.cv_max, None)
assert_equal(ifaces.direction, 0)
class TestGenericVolumeInterfaceSet(object):
def test_sanitize_input(self):
# this is just to make the rest a little more readable
sanitize = GenericVolumeInterfaceSet._sanitize_input
assert_equal(([float("-inf")]*3, [0.0, 0.1, 0.2], 1),
sanitize(float("-inf"), [0.0, 0.1, 0.2]))
assert_equal(([0.2, 0.1, 0.0], [float("inf")]*3, -1),
sanitize([0.2, 0.1, 0.0], float("inf")))
assert_equal(([-0.1, -0.2], [0.1, 0.2], 0),
sanitize([-0.1, -0.2], [0.1, 0.2]))
assert_equal(([0.0, 0.0], [0.1, 0.2], 1),
sanitize([0.0, 0.0], [0.1, 0.2]))
assert_equal(([-0.1, -0.2], [0.0, 0.0], -1),
sanitize([-0.1, -0.2], [0.0, 0.0]))
# and the idiot case:
assert_equal(([-0.1, -0.1], [0.1, 0.1], 0),
sanitize([-0.1, -0.1], [0.1, 0.1]))
@raises(RuntimeError)
def test_bad_sanitize(self):
GenericVolumeInterfaceSet._sanitize_input([0.0, -0.1],
[0.1, 0.2, 0.3])
class TestVolumeInterfaceSet(object):
def setup(self):
paths.InterfaceSet._reset()
self.cv = paths.FunctionCV(name="x", f=lambda s: s.xyz[0][0])
self.increasing_set = paths.VolumeInterfaceSet(cv=self.cv,
minvals=float("-inf"),
maxvals=[0.0, 0.1])
self.decreasing_set = paths.VolumeInterfaceSet(cv=self.cv,
minvals=[0.0, -0.1],
maxvals=float("inf"))
self.weird_set = paths.VolumeInterfaceSet(cv=self.cv,
minvals=[-0.1, -0.2],
maxvals=[0.1, 0.2])
def test_initialization(self):
assert_equal(len(paths.InterfaceSet._cv_max_dict), 1)
cv_max = list(paths.InterfaceSet._cv_max_dict.values())[0]
assert_equal(len(self.increasing_set), 2)
assert_equal(self.increasing_set.direction, 1)
assert_equal(self.increasing_set.lambdas, [0.0, 0.1])
assert_equal(self.increasing_set.cv_max, cv_max)
assert_equal(len(self.decreasing_set), 2)
assert_equal(self.decreasing_set.direction, -1)
assert_equal(self.decreasing_set.lambdas, [0.0, -0.1])
# TODO: decide what to do about cv_max for decreasing/weird
assert_equal(len(self.weird_set), 2)
assert_equal(self.weird_set.direction, 0)
assert_equal(self.weird_set.lambdas, None)
def test_new_interface(self):
new_iface = self.increasing_set.new_interface(0.25)
expected = paths.CVDefinedVolume(self.cv, float("-inf"), 0.25)
assert_equal(expected, new_iface)
@raises(TypeError)
def test_bad_new_interface(self):
self.weird_set.new_interface(0.25)
def test_storage(self):
import os
fname = data_filename("interface_set_storage_test.nc")
if os.path.isfile(fname):
os.remove(fname)
template_traj = make_1d_traj([0.0])
storage_w = paths.Storage(fname, "w")
storage_w.save(template_traj)
storage_w.save(self.increasing_set)
storage_w.sync_all()
storage_w.close()
storage_r = paths.AnalysisStorage(fname)
reloaded = storage_r.interfacesets[0]
assert_items_equal(reloaded.lambdas, self.increasing_set.lambdas)
for (truth, beauty) in zip(self.increasing_set, reloaded):
assert_equal(truth, beauty)
for (v, l) in zip(reloaded.volumes, reloaded.lambdas):
assert_equal(reloaded.get_lambda(v), l)
if os.path.isfile(fname):
os.remove(fname)
class TestPeriodicVolumeInterfaceSet(object):
def setup(self):
paths.InterfaceSet._reset()
self.cv = paths.FunctionCV(name="x", f=lambda s: s.xyz[0][0])
self.increasing_set = paths.PeriodicVolumeInterfaceSet(
cv=self.cv,
minvals=0.0,
maxvals=[100, 150, 200-360],
period_min=-180,
period_max=180
)
def test_initialization(self):
assert_equal(self.increasing_set.direction, 1)
assert_equal(len(self.increasing_set), 3)
assert_equal(self.increasing_set.lambdas, [100, 150, -160])
def test_new_interface(self):
new_iface = self.increasing_set.new_interface(-140)
expected = paths.PeriodicCVDefinedVolume(self.cv, 0.0, -140, -180, 180)
assert_equal(new_iface, expected)
def test_storage(self):
import os
fname = data_filename("interface_set_storage_test.nc")
if os.path.isfile(fname):
os.remove(fname)
template_traj = make_1d_traj([0.0])
template = template_traj[0]
storage_w = paths.Storage(fname, "w")
storage_w.save(template_traj)
storage_w.save(self.increasing_set)
storage_w.sync_all()
storage_r = paths.AnalysisStorage(fname)
reloaded = storage_r.interfacesets[0]
assert_items_equal(reloaded.lambdas, self.increasing_set.lambdas)
assert_equal(reloaded.period_min, self.increasing_set.period_min)
assert_equal(reloaded.period_max, self.increasing_set.period_max)
for (truth, beauty) in zip(self.increasing_set, reloaded):
assert_equal(truth, beauty)
for (v, l) in zip(reloaded.volumes, reloaded.lambdas):
assert_equal(reloaded.get_lambda(v), l)
storage_r.close()
storage_w.close()
if os.path.isfile(fname):
os.remove(fname)
| mit | fceb851af257832bbf670f9d3a6ba59d | 39.93722 | 79 | 0.580458 | 3.437123 | false | true | false | false |
jpadilla/pyjwt | jwt/utils.py | 2 | 3973 | import base64
import binascii
import re
from typing import Union
try:
from cryptography.hazmat.primitives.asymmetric.ec import EllipticCurve
from cryptography.hazmat.primitives.asymmetric.utils import (
decode_dss_signature,
encode_dss_signature,
)
except ModuleNotFoundError:
EllipticCurve = None
def force_bytes(value: Union[str, bytes]) -> bytes:
if isinstance(value, str):
return value.encode("utf-8")
elif isinstance(value, bytes):
return value
else:
raise TypeError("Expected a string value")
def base64url_decode(input: Union[str, bytes]) -> bytes:
if isinstance(input, str):
input = input.encode("ascii")
rem = len(input) % 4
if rem > 0:
input += b"=" * (4 - rem)
return base64.urlsafe_b64decode(input)
def base64url_encode(input: bytes) -> bytes:
return base64.urlsafe_b64encode(input).replace(b"=", b"")
def to_base64url_uint(val: int) -> bytes:
if val < 0:
raise ValueError("Must be a positive integer")
int_bytes = bytes_from_int(val)
if len(int_bytes) == 0:
int_bytes = b"\x00"
return base64url_encode(int_bytes)
def from_base64url_uint(val: Union[str, bytes]) -> int:
if isinstance(val, str):
val = val.encode("ascii")
data = base64url_decode(val)
return int.from_bytes(data, byteorder="big")
def number_to_bytes(num: int, num_bytes: int) -> bytes:
padded_hex = "%0*x" % (2 * num_bytes, num)
return binascii.a2b_hex(padded_hex.encode("ascii"))
def bytes_to_number(string: bytes) -> int:
return int(binascii.b2a_hex(string), 16)
def bytes_from_int(val: int) -> bytes:
remaining = val
byte_length = 0
while remaining != 0:
remaining >>= 8
byte_length += 1
return val.to_bytes(byte_length, "big", signed=False)
def der_to_raw_signature(der_sig: bytes, curve: EllipticCurve) -> bytes:
num_bits = curve.key_size
num_bytes = (num_bits + 7) // 8
r, s = decode_dss_signature(der_sig)
return number_to_bytes(r, num_bytes) + number_to_bytes(s, num_bytes)
def raw_to_der_signature(raw_sig: bytes, curve: EllipticCurve) -> bytes:
num_bits = curve.key_size
num_bytes = (num_bits + 7) // 8
if len(raw_sig) != 2 * num_bytes:
raise ValueError("Invalid signature")
r = bytes_to_number(raw_sig[:num_bytes])
s = bytes_to_number(raw_sig[num_bytes:])
return encode_dss_signature(r, s)
# Based on https://github.com/hynek/pem/blob/7ad94db26b0bc21d10953f5dbad3acfdfacf57aa/src/pem/_core.py#L224-L252
_PEMS = {
b"CERTIFICATE",
b"TRUSTED CERTIFICATE",
b"PRIVATE KEY",
b"PUBLIC KEY",
b"ENCRYPTED PRIVATE KEY",
b"OPENSSH PRIVATE KEY",
b"DSA PRIVATE KEY",
b"RSA PRIVATE KEY",
b"RSA PUBLIC KEY",
b"EC PRIVATE KEY",
b"DH PARAMETERS",
b"NEW CERTIFICATE REQUEST",
b"CERTIFICATE REQUEST",
b"SSH2 PUBLIC KEY",
b"SSH2 ENCRYPTED PRIVATE KEY",
b"X509 CRL",
}
_PEM_RE = re.compile(
b"----[- ]BEGIN ("
+ b"|".join(_PEMS)
+ b""")[- ]----\r?
.+?\r?
----[- ]END \\1[- ]----\r?\n?""",
re.DOTALL,
)
def is_pem_format(key: bytes) -> bool:
return bool(_PEM_RE.search(key))
# Based on https://github.com/pyca/cryptography/blob/bcb70852d577b3f490f015378c75cba74986297b/src/cryptography/hazmat/primitives/serialization/ssh.py#L40-L46
_CERT_SUFFIX = b"-cert-v01@openssh.com"
_SSH_PUBKEY_RC = re.compile(rb"\A(\S+)[ \t]+(\S+)")
_SSH_KEY_FORMATS = [
b"ssh-ed25519",
b"ssh-rsa",
b"ssh-dss",
b"ecdsa-sha2-nistp256",
b"ecdsa-sha2-nistp384",
b"ecdsa-sha2-nistp521",
]
def is_ssh_key(key: bytes) -> bool:
if any(string_value in key for string_value in _SSH_KEY_FORMATS):
return True
ssh_pubkey_match = _SSH_PUBKEY_RC.match(key)
if ssh_pubkey_match:
key_type = ssh_pubkey_match.group(1)
if _CERT_SUFFIX == key_type[-len(_CERT_SUFFIX) :]:
return True
return False
| mit | 6fabfa91a96ded3469f839ff539693aa | 23.83125 | 157 | 0.631009 | 2.958302 | false | false | false | false |
theolind/pymysensors | tests/test_gateway_mqtt.py | 1 | 8474 | """Test mysensors MQTT gateway with unittest."""
import logging
import time
from unittest import mock
import pytest
from mysensors.gateway_mqtt import MQTTGateway
from mysensors.sensor import Sensor
# pylint: disable=redefined-outer-name
@pytest.fixture
def mock_pub():
"""Return a mock callback to publish to mqtt broker."""
return mock.Mock()
@pytest.fixture
def mock_sub():
"""Return a mock callback to subscribe to a mqtt topic."""
return mock.Mock()
@pytest.fixture
def gateway(mock_pub, mock_sub):
"""Yield gateway instance."""
_gateway = MQTTGateway(mock_pub, mock_sub)
yield _gateway
_gateway.tasks.stop()
def get_gateway(*args, **kwargs):
"""Return a gateway instance."""
return MQTTGateway(*args, **kwargs)
@pytest.fixture
def add_sensor(gateway):
"""Return function for adding node."""
def _add_sensor(sensor_id):
"""Add sensor node. Return sensor node instance."""
gateway.sensors[sensor_id] = Sensor(sensor_id)
return gateway.sensors[sensor_id]
return _add_sensor
def get_sensor(sensor_id, gateway):
"""Add sensor on gateway and return sensor instance."""
gateway.sensors[sensor_id] = Sensor(sensor_id)
return gateway.sensors[sensor_id]
def test_send(gateway, mock_pub):
"""Test send method."""
gateway.send("1;1;1;0;1;20\n")
assert mock_pub.call_count == 1
assert mock_pub.call_args == mock.call("/1/1/1/0/1", "20", 0, True)
def test_send_empty_string(gateway, mock_pub):
"""Test send method with empty string."""
gateway.send("")
assert mock_pub.call_count == 0
def test_send_error(gateway, mock_pub, caplog):
"""Test send method with error on publish."""
mock_pub.side_effect = ValueError("Publish topic cannot contain wildcards.")
caplog.set_level(logging.ERROR)
gateway.send("1;1;1;0;1;20\n")
assert mock_pub.call_count == 1
assert mock_pub.call_args == mock.call("/1/1/1/0/1", "20", 0, True)
assert (
"Publish to /1/1/1/0/1 failed: "
"Publish topic cannot contain wildcards." in caplog.text
)
def test_recv(gateway, add_sensor):
"""Test recv method."""
sensor = add_sensor(1)
sensor.add_child_sensor(1, gateway.const.Presentation.S_HUM)
sensor.children[1].values[gateway.const.SetReq.V_HUM] = "20"
gateway.tasks.transport.recv("/1/1/2/0/1", "", 0)
ret = gateway.tasks.run_job()
assert ret == "1;1;1;0;1;20\n"
gateway.tasks.transport.recv("/1/1/2/0/1", "", 1)
ret = gateway.tasks.run_job()
assert ret == "1;1;1;1;1;20\n"
def test_recv_wrong_prefix(gateway, add_sensor):
"""Test recv method with wrong topic prefix."""
sensor = add_sensor(1)
sensor.add_child_sensor(1, gateway.const.Presentation.S_HUM)
sensor.children[1].values[gateway.const.SetReq.V_HUM] = "20"
gateway.tasks.transport.recv("wrong/1/1/2/0/1", "", 0)
ret = gateway.tasks.run_job()
assert ret is None
def test_presentation(gateway, add_sensor, mock_sub):
"""Test handle presentation message."""
add_sensor(1)
gateway.logic("1;1;0;0;7;Humidity Sensor\n")
calls = [
mock.call("/1/1/1/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/2/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/+/4/+/+", gateway.tasks.transport.recv, 0),
]
assert mock_sub.call_count == 3
assert mock_sub.mock_calls == calls
def test_presentation_no_sensor(gateway, mock_sub):
"""Test handle presentation message without sensor."""
gateway.logic("1;1;0;0;7;Humidity Sensor\n")
assert mock_sub.call_count == 0
def test_subscribe_error(gateway, add_sensor, mock_sub, caplog):
"""Test subscribe throws error."""
add_sensor(1)
mock_sub.side_effect = ValueError("No topic specified, or incorrect topic type.")
caplog.set_level(logging.ERROR)
gateway.logic("1;1;0;0;7;Humidity Sensor\n")
calls = [
mock.call("/1/1/1/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/2/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/+/4/+/+", gateway.tasks.transport.recv, 0),
]
assert mock_sub.call_count == 3
assert mock_sub.mock_calls == calls
assert (
"Subscribe to /1/1/1/+/+ failed: "
"No topic specified, or incorrect topic type." in caplog.text
)
@mock.patch("mysensors.persistence.Persistence.safe_load_sensors")
@mock.patch("mysensors.persistence.Persistence.save_sensors")
def test_start_stop_gateway(mock_save, mock_load, mock_pub, mock_sub):
"""Test start and stop of MQTT gateway."""
gateway = get_gateway(mock_pub, mock_sub, persistence=True)
mock_schedule_save = mock.MagicMock()
gateway.tasks.persistence.schedule_save_sensors = mock_schedule_save
sensor = get_sensor(1, gateway)
sensor.add_child_sensor(1, gateway.const.Presentation.S_HUM)
sensor.children[1].values[gateway.const.SetReq.V_HUM] = "20"
# should generate a publish of 20
gateway.tasks.transport.recv("/1/1/2/0/1", "", 0)
gateway.tasks.transport.recv("/1/1/1/0/1", "30", 0)
# should generate a publish of 30
gateway.tasks.transport.recv("/1/1/2/0/1", "", 0)
gateway.start_persistence()
assert mock_load.call_count == 1
assert mock_schedule_save.call_count == 1
gateway.start()
time.sleep(0.05)
calls = [
mock.call("/+/+/0/+/+", gateway.tasks.transport.recv, 0),
mock.call("/+/+/3/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/1/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/2/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/+/4/+/+", gateway.tasks.transport.recv, 0),
]
assert mock_sub.call_count == 5
assert mock_sub.mock_calls == calls
calls = [
mock.call("/1/1/1/0/1", "20", 0, True),
mock.call("/1/1/1/0/1", "30", 0, True),
]
assert mock_pub.call_count == 2
assert mock_pub.mock_calls == calls
gateway.stop()
assert mock_save.call_count == 1
def test_mqtt_load_persistence(mock_pub, mock_sub, tmpdir):
"""Test load persistence file for MQTTGateway."""
gateway = get_gateway(mock_pub, mock_sub, persistence=True)
sensor = get_sensor(1, gateway)
sensor.add_child_sensor(1, gateway.const.Presentation.S_HUM)
sensor.children[1].values[gateway.const.SetReq.V_HUM] = "20"
persistence_file = tmpdir.join("file.json")
gateway.tasks.persistence.persistence_file = persistence_file.strpath
gateway.tasks.persistence.save_sensors()
del gateway.sensors[1]
assert 1 not in gateway.sensors
gateway.tasks.persistence.safe_load_sensors()
gateway.init_topics()
assert gateway.sensors[1].children[1].id == sensor.children[1].id
assert gateway.sensors[1].children[1].type == sensor.children[1].type
assert gateway.sensors[1].children[1].values == sensor.children[1].values
calls = [
mock.call("/+/+/0/+/+", gateway.tasks.transport.recv, 0),
mock.call("/+/+/3/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/1/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/1/2/+/+", gateway.tasks.transport.recv, 0),
mock.call("/1/+/4/+/+", gateway.tasks.transport.recv, 0),
]
assert mock_sub.call_count == 5
assert mock_sub.mock_calls == calls
def test_nested_prefix(mock_pub, mock_sub):
"""Test recv and send method with nested topic prefix."""
gateway = get_gateway(
mock_pub, mock_sub, in_prefix="test/test-in", out_prefix="test/test-out"
)
sensor = get_sensor(1, gateway)
sensor.add_child_sensor(1, gateway.const.Presentation.S_HUM)
sensor.children[1].values[gateway.const.SetReq.V_HUM] = "20"
gateway.tasks.transport.recv("test/test-in/1/1/2/0/1", "", 0)
ret = gateway.tasks.run_job()
assert ret == "1;1;1;0;1;20\n"
gateway.tasks.transport.send(ret)
assert mock_pub.call_args == mock.call("test/test-out/1/1/1/0/1", "20", 0, True)
gateway.tasks.transport.recv("test/test-in/1/1/2/0/1", "", 1)
ret = gateway.tasks.run_job()
assert ret == "1;1;1;1;1;20\n"
gateway.tasks.transport.send(ret)
assert mock_pub.call_args == mock.call("test/test-out/1/1/1/1/1", "20", 1, True)
def test_get_gateway_id(mock_pub, mock_sub):
"""Test get_gateway_id method."""
gateway = get_gateway(
mock_pub, mock_sub, in_prefix="test/test-in", out_prefix="test/test-out"
)
gateway_id = gateway.get_gateway_id()
assert gateway_id == "test/test-in"
| mit | 51ae479e66e8284266ad1633154e6ccc | 34.755274 | 85 | 0.64633 | 3.144341 | false | true | false | false |
theolind/pymysensors | mysensors/cli/gateway_serial.py | 1 | 1251 | """Start a serial gateway."""
import click
from mysensors.cli.helper import (
common_gateway_options,
handle_msg,
run_async_gateway,
run_gateway,
)
from mysensors.gateway_serial import AsyncSerialGateway, SerialGateway
def common_serial_options(func):
"""Supply common serial gateway options."""
func = click.option(
"-b",
"--baud",
default=115200,
show_default=True,
type=int,
help="Baud rate of the serial connection.",
)(func)
func = click.option(
"-p", "--port", required=True, help="Serial port of the gateway."
)(func)
return func
@click.command(options_metavar="<options>")
@common_serial_options
@common_gateway_options
def serial_gateway(**kwargs):
"""Start a serial gateway."""
gateway = SerialGateway(event_callback=handle_msg, **kwargs)
run_gateway(gateway)
@click.command(options_metavar="<options>")
@common_serial_options
@common_gateway_options
def async_serial_gateway(**kwargs):
"""Start an async serial gateway."""
async def gateway_factory():
"""Create the async serial gateway."""
return AsyncSerialGateway(event_callback=handle_msg, **kwargs), None
run_async_gateway(gateway_factory)
| mit | 7bb64e7663b5793d60f94e6342e87505 | 25.0625 | 76 | 0.665867 | 3.897196 | false | false | false | false |
dashwav/nano-chan | cogs/utils/helpers.py | 1 | 3379 | """
Helper functions for nano-chan.
Did take a few from kitsu-chan and modify them tbh
"""
import asyncio
import discord
from discord.ext import commands
async def confirm(ctx: commands.Context, member_to_kick, reason):
"""
Yes no helper. Ask a confirmation message with a timeout of 5 seconds.
ctx - The context in which the question is being asked.
message - Optional messsage that the question should ask.
"""
message = create_confirm_embed(ctx, ctx.guild, member_to_kick, reason)
confirmation_message = await ctx.send(embed=message, delete_after=10)
try:
message = await ctx.bot.wait_for("message",
timeout=10,
check=lambda message: message.author
== ctx.message.author)
except asyncio.TimeoutError:
return False
if message.clean_content.lower() != 'confirm':
return False
try:
await confirmation_message.delete()
await message.delete()
except Exception as e:
ctx.bot.logger.warning(f'Error in deleting message: {e}')
return True
async def custom_confirm(ctx: commands.Context, custom_message):
"""
Yes no helper. Ask a confirmation message with a timeout of 5 seconds.
ctx - The context in which the question is being asked.
message - Optional messsage that the question should ask.
"""
message = create_custom_embed(ctx, custom_message)
confirmation_message = await ctx.send(embed=message, delete_after=10)
try:
message = await ctx.bot.wait_for("message",
timeout=10,
check=lambda message: message.author
== ctx.message.author)
except asyncio.TimeoutError:
return False
if message.clean_content.lower() != 'confirm':
return False
try:
await confirmation_message.delete()
await message.delete()
except Exception as e:
ctx.bot.logger.warning(f'Error in deleting message: {e}')
return True
def create_confirm_embed(ctx, server_name, member_to_kick, reason):
embed = discord.Embed(
title=f'❗ Confirmation Request ❗',
type='rich')
embed.description = f'\nYou are attempting to {ctx.command}'\
f'**{member_to_kick}** from **{server_name}**'\
f'\n```{str(ctx.command).title()} '\
f'reason:\n\n{reason}```'\
f'\n➡️ Type `confirm` to {ctx.command} the user,'\
' or literally anything else to cancel.'\
'\n\n*You have 10 seconds...*'
embed.add_field(name='ID', value=member_to_kick.id)
return embed
def create_custom_embed(ctx, custom_message):
embed = discord.Embed(
title=f'❗ Confirmation Request ❗',
type='rich')
embed.description = f'\nYou are attempting to {ctx.command}:\n'\
f'{custom_message}'\
f'\n➡️ Type `confirm` to {ctx.command}'\
' or literally anything else to cancel.'\
'\n\n*You have 10 seconds...*'
return embed
| mit | ab0661a33e1e90e69ccde24eb2c28404 | 38.035714 | 77 | 0.558727 | 4.278626 | false | false | false | false |
gocardless/gocardless-pro-python | tests/integration/institutions_integration_test.py | 1 | 3986 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
import json
import requests
import responses
from nose.tools import (
assert_equal,
assert_is_instance,
assert_is_none,
assert_is_not_none,
assert_not_equal,
assert_raises
)
from gocardless_pro.errors import MalformedResponseError
from gocardless_pro import resources
from gocardless_pro import list_response
from .. import helpers
@responses.activate
def test_institutions_list():
fixture = helpers.load_fixture('institutions')['list']
helpers.stub_response(fixture)
response = helpers.client.institutions.list(*fixture['url_params'])
body = fixture['body']['institutions']
assert_is_instance(response, list_response.ListResponse)
assert_is_instance(response.records[0], resources.Institution)
assert_equal(response.before, fixture['body']['meta']['cursors']['before'])
assert_equal(response.after, fixture['body']['meta']['cursors']['after'])
assert_is_none(responses.calls[-1].request.headers.get('Idempotency-Key'))
assert_equal([r.bank_redirect for r in response.records],
[b.get('bank_redirect') for b in body])
assert_equal([r.country_code for r in response.records],
[b.get('country_code') for b in body])
assert_equal([r.icon_url for r in response.records],
[b.get('icon_url') for b in body])
assert_equal([r.id for r in response.records],
[b.get('id') for b in body])
assert_equal([r.logo_url for r in response.records],
[b.get('logo_url') for b in body])
assert_equal([r.name for r in response.records],
[b.get('name') for b in body])
@responses.activate
def test_timeout_institutions_list_retries():
fixture = helpers.load_fixture('institutions')['list']
with helpers.stub_timeout_then_response(fixture) as rsps:
response = helpers.client.institutions.list(*fixture['url_params'])
assert_equal(2, len(rsps.calls))
assert_equal(rsps.calls[0].request.headers.get('Idempotency-Key'),
rsps.calls[1].request.headers.get('Idempotency-Key'))
body = fixture['body']['institutions']
assert_is_instance(response, list_response.ListResponse)
assert_is_instance(response.records[0], resources.Institution)
assert_equal(response.before, fixture['body']['meta']['cursors']['before'])
assert_equal(response.after, fixture['body']['meta']['cursors']['after'])
def test_502_institutions_list_retries():
fixture = helpers.load_fixture('institutions')['list']
with helpers.stub_502_then_response(fixture) as rsps:
response = helpers.client.institutions.list(*fixture['url_params'])
assert_equal(2, len(rsps.calls))
assert_equal(rsps.calls[0].request.headers.get('Idempotency-Key'),
rsps.calls[1].request.headers.get('Idempotency-Key'))
body = fixture['body']['institutions']
assert_is_instance(response, list_response.ListResponse)
assert_is_instance(response.records[0], resources.Institution)
assert_equal(response.before, fixture['body']['meta']['cursors']['before'])
assert_equal(response.after, fixture['body']['meta']['cursors']['after'])
@responses.activate
def test_institutions_all():
fixture = helpers.load_fixture('institutions')['list']
def callback(request):
if 'after=123' in request.url:
fixture['body']['meta']['cursors']['after'] = None
else:
fixture['body']['meta']['cursors']['after'] = '123'
return [200, {}, json.dumps(fixture['body'])]
url = 'http://example.com' + fixture['path_template']
responses.add_callback(fixture['method'], url, callback)
all_records = list(helpers.client.institutions.all())
assert_equal(len(all_records), len(fixture['body']['institutions']) * 2)
for record in all_records:
assert_is_instance(record, resources.Institution)
| mit | 780d6045fd9e15b0ad7295f8b90b157e | 38.078431 | 79 | 0.672102 | 3.568487 | false | false | false | false |
maxking/docker-mailman | web/mailman-web/settings.py | 1 | 12645 | # -*- coding: utf-8 -*-
# Copyright (C) 1998-2016 by the Free Software Foundation, Inc.
#
# This file is part of Mailman Suite.
#
# Mailman Suite is free sofware: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# Mailman Suite is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
# You should have received a copy of the GNU General Public License along
# with Mailman Suite. If not, see <http://www.gnu.org/licenses/>.
"""
Django Settings for Mailman Suite (hyperkitty + postorius)
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import dj_database_url
import sys
from socket import gethostbyname
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# SECURITY WARNING: keep the secret key used in production secret!
# Read from the environment so the key never lives in the image or repo.
SECRET_KEY = os.environ.get('SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

# Recipients of error emails when DEBUG is False.
ADMINS = (
    ('Mailman Suite Admin', 'root@localhost'),
)

SITE_ID = 1

# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/3.1/ref/settings/#allowed-hosts
# SERVE_FROM_DOMAIN and DJANGO_ALLOWED_HOSTS are optional environment
# variables; drop unset (None) or empty entries so ALLOWED_HOSTS only
# contains real host names.
ALLOWED_HOSTS = [
    host
    for host in (
        "localhost",  # Archiving API from Mailman, keep it.
        # Add here all production URLs you may have.
        "mailman-web",
        gethostbyname("mailman-web"),
        os.environ.get('SERVE_FROM_DOMAIN'),
        os.environ.get('DJANGO_ALLOWED_HOSTS'),
    )
    if host
]

# Mailman API credentials
MAILMAN_REST_API_URL = os.environ.get('MAILMAN_REST_URL', 'http://mailman-core:8001')
MAILMAN_REST_API_USER = os.environ.get('MAILMAN_REST_USER', 'restadmin')
MAILMAN_REST_API_PASS = os.environ.get('MAILMAN_REST_PASSWORD', 'restpass')
MAILMAN_ARCHIVER_KEY = os.environ.get('HYPERKITTY_API_KEY')
# Hosts allowed to deliver messages to the archiver API.  Prefer an
# explicitly configured MAILMAN_HOST_IP; only resolve MAILMAN_HOSTNAME via
# DNS when no IP was given (the original form always performed the DNS
# lookup, even when MAILMAN_HOST_IP was set).
MAILMAN_ARCHIVER_FROM = (
    os.environ.get('MAILMAN_HOST_IP')
    or gethostbyname(os.environ.get('MAILMAN_HOSTNAME', 'mailman-core')),
)
# Application definition
# INSTALLED_APPS starts out empty here; presumably a deployment-specific
# settings module extends it with DEFAULT_APPS (and optionally
# MAILMAN_WEB_SOCIAL_AUTH) -- TODO confirm against the settings file that
# imports this one.
INSTALLED_APPS = []

# The standard application stack for the Mailman web UI: HyperKitty
# (archiver), Postorius (list admin UI), django-mailman3 glue, the Django
# contrib apps, REST framework, asset compression, search (haystack),
# the django-q task queue, and allauth authentication.
DEFAULT_APPS = [
    'hyperkitty',
    'postorius',
    'django_mailman3',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'django_gravatar',
    'compressor',
    'haystack',
    'django_extensions',
    'django_q',
    'allauth',
    'allauth.account',
    'allauth.socialaccount',
]

# Optional social-login providers (allauth + the Fedora provider shipped
# with django-mailman3); enabled only if added to INSTALLED_APPS.
MAILMAN_WEB_SOCIAL_AUTH = [
    'django_mailman3.lib.auth.fedora',
    'allauth.socialaccount.providers.openid',
    'allauth.socialaccount.providers.github',
    'allauth.socialaccount.providers.gitlab',
    'allauth.socialaccount.providers.google',
]
# Middleware order matters in Django: sessions before auth/messages, and
# the project-specific middlewares (timezone handling from django-mailman3,
# Postorius API-error handling) run last.
MIDDLEWARE = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'django.middleware.security.SecurityMiddleware',
    'django_mailman3.middleware.TimezoneMiddleware',
    'postorius.middleware.PostoriusMiddleware',
)

# Root URL configuration module (urls.py next to this settings file).
ROOT_URLCONF = 'urls'

# Template engine configuration: app-dir templates only (DIRS is empty),
# with the extra context processors required by HyperKitty and Postorius.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.i18n',
                'django.template.context_processors.media',
                'django.template.context_processors.static',
                'django.template.context_processors.tz',
                'django.template.context_processors.csrf',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
                'django_mailman3.context_processors.common',
                'hyperkitty.context_processors.common',
                'postorius.context_processors.postorius',
            ],
        },
    },
]

# WSGI entry point (wsgi.py next to this settings file).
WSGI_APPLICATION = 'wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
# dj_database_url uses $DATABASE_URL environment variable to create a
# django-style-config-dict.
# https://github.com/kennethreitz/dj-database-url
# conn_max_age=600 keeps database connections open for up to ten minutes
# (persistent connections) instead of reconnecting per request.
DATABASES = {
    'default': dj_database_url.config(conn_max_age=600)
}

# If you're behind a proxy, use the X-Forwarded-Host header
# See https://docs.djangoproject.com/en/1.8/ref/settings/#use-x-forwarded-host
# This container is expected to sit behind a reverse proxy (nginx/uwsgi),
# so trust the forwarded host header.
USE_X_FORWARDED_HOST = True

# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
# The four stock Django validators: similarity to user attributes, minimum
# length, common-password blacklist, and all-numeric rejection.
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Collected static files land on the shared volume so the web server in
# front of the container can serve them directly.
STATIC_ROOT = '/opt/mailman-web-data/static'
STATIC_URL = '/static/'

# Additional locations of static files
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'compressor.finders.CompressorFinder',
)

# NOTE(security): PickleSerializer deserializes session data with pickle,
# which can execute arbitrary code if SECRET_KEY ever leaks, and it was
# removed in Django 5.0 -- consider switching to JSONSerializer; left
# unchanged here to avoid invalidating existing sessions.
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.PickleSerializer'

# Named URL patterns (provided by allauth / postorius) for the auth flow.
LOGIN_URL = 'account_login'
LOGIN_REDIRECT_URL = 'list_index'
LOGOUT_URL = 'account_logout'

# Use SERVE_FROM_DOMAIN as the default domain in the email.
hostname = os.environ.get('SERVE_FROM_DOMAIN', 'localhost.local')
DEFAULT_FROM_EMAIL = 'postorius@{}'.format(hostname)
SERVER_EMAIL = 'root@{}'.format(hostname)
# Change this when you have a real email backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = os.environ.get('SMTP_HOST', '')
EMAIL_PORT = os.environ.get('SMTP_PORT', 25)
EMAIL_HOST_USER = os.environ.get('SMTP_HOST_USER', '')
EMAIL_HOST_PASSWORD = os.environ.get('SMTP_HOST_PASSWORD', '')
EMAIL_USE_TLS = os.environ.get('SMTP_USE_TLS', False)
EMAIL_USE_SSL = os.environ.get('SMTP_USE_SSL', False)
# Compatibility with Bootstrap 3
# Django's "error" message level maps to Bootstrap's "danger" CSS class.
from django.contrib.messages import constants as messages  # flake8: noqa
MESSAGE_TAGS = {
    messages.ERROR: 'danger'
}
#
# Social auth
#
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
# Django Allauth
ACCOUNT_AUTHENTICATION_METHOD = "username_email"
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
# You probably want https in production, but this is a dev setup file
ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"
ACCOUNT_UNIQUE_EMAIL = True
SOCIALACCOUNT_PROVIDERS = {
'openid': {
'SERVERS': [
dict(id='yahoo',
name='Yahoo',
openid_url='http://me.yahoo.com'),
],
},
'google': {
'SCOPE': ['profile', 'email'],
'AUTH_PARAMS': {'access_type': 'online'},
},
'facebook': {
'METHOD': 'oauth2',
'SCOPE': ['email'],
'FIELDS': [
'email',
'name',
'first_name',
'last_name',
'locale',
'timezone',
],
'VERSION': 'v2.4',
},
}
# django-compressor
# https://pypi.python.org/pypi/django_compressor
#
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc {infile} {outfile}'),
('text/x-scss', 'sassc -t compressed {infile} {outfile}'),
('text/x-sass', 'sassc -t compressed {infile} {outfile}'),
)
# On a production setup, setting COMPRESS_OFFLINE to True will bring a
# significant performance improvement, as CSS files will not need to be
# recompiled on each requests. It means running an additional "compress"
# management command after each code upgrade.
# http://django-compressor.readthedocs.io/en/latest/usage/#offline-compression
# COMPRESS_OFFLINE = True
#
# Full-text search engine
#
HAYSTACK_CONNECTIONS = {
    'default': {
        'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
        # Index files are kept on the persistent data volume.
        'PATH': "/opt/mailman-web-data/fulltext_index",
        # You can also use the Xapian engine, it's faster and more accurate,
        # but requires another library.
        # http://django-haystack.readthedocs.io/en/v2.4.1/installing_search_engines.html#xapian
        # Example configuration for Xapian:
        #'ENGINE': 'xapian_backend.XapianEngine'
    },
}
# Imported here for the console handler's stream below.
import sys
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        # Only pass records through when DEBUG is False (production).
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        'file':{
            'level': 'INFO',
            'class': 'logging.handlers.RotatingFileHandler',
            #'class': 'logging.handlers.WatchedFileHandler',
            'filename': os.environ.get('DJANGO_LOG_URL','/opt/mailman-web-data/logs/mailmanweb.log'),
            'formatter': 'verbose',
        },
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'simple',
            'level': 'INFO',
            'stream': sys.stdout,
        },
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins', 'file'],
            'level': 'INFO',
            'propagate': True,
        },
        'django': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': True,
        },
        'hyperkitty': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': True,
        },
        'postorius': {
            'handlers': ['file'],
            'level': 'INFO',
            'propagate': True
        },
    },
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(process)d %(name)s %(message)s'
        },
        'simple': {
            'format': '%(levelname)s %(message)s'
        },
    },
}
# Opt-in: mirror Django logs to stdout so `docker logs` shows them.
if os.environ.get('LOG_TO_CONSOLE') == 'yes':
    LOGGING['loggers']['django']['handlers'].append('console')
    LOGGING['loggers']['django.request']['handlers'].append('console')
# HyperKitty-specific
#
# Only display mailing-lists from the same virtual host as the webserver
FILTER_VHOST = False
# django-q task cluster settings (used by HyperKitty for async jobs).
# NOTE(review): django-q documents that 'retry' should be larger than
# 'timeout'; here both are 300 -- confirm this is intentional.
Q_CLUSTER = {
    'timeout': 300,
    'retry': 300,
    'save_limit': 100,
    'orm': 'default',
}
# Base URL Postorius uses to fetch rendered templates from the web container.
POSTORIUS_TEMPLATE_BASE_URL = os.environ.get('POSTORIUS_TEMPLATE_BASE_URL', 'http://mailman-web:8000')
DISKCACHE_PATH = os.environ.get('DISKCACHE_PATH', '/opt/mailman-web-data/diskcache')
# Bug fix: environment variables are always strings, but diskcache's
# size_limit option expects an integer number of bytes; previously a
# DISKCACHE_SIZE set in the environment was passed through as a string.
DISKCACHE_SIZE = int(os.environ.get('DISKCACHE_SIZE', 2 ** 30))  # default 1 GiB
CACHES = {
    'default': {
        'BACKEND': 'diskcache.DjangoCache',
        'LOCATION': DISKCACHE_PATH,
        'OPTIONS': {
            'size_limit': DISKCACHE_SIZE,
        },
    },
}
# Allow site-specific overrides from an optional settings_local module on
# the Python path.  NOTE(review): ImportError is swallowed, which also hides
# import errors *inside* an existing settings_local.py -- confirm that is
# acceptable for this deployment.
try:
    from settings_local import *
except ImportError:
    pass
# Compatibility for older installs that override INSTALLED_APPS
# DEFAULT_APPS and MAILMAN_WEB_SOCIAL_AUTH are presumably defined earlier in
# this settings file (outside this excerpt) -- verify before refactoring.
if not INSTALLED_APPS:
    INSTALLED_APPS = DEFAULT_APPS + MAILMAN_WEB_SOCIAL_AUTH
| mit | f94ed7b90b6b918b71728aea5f05bd49 | 29.543478 | 127 | 0.652748 | 3.574053 | false | false | false | false |
gocardless/gocardless-pro-python | gocardless_pro/services/payments_service.py | 1 | 5991 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class PaymentsService(base_service.BaseService):
    """Service class that provides access to the payments
    endpoints of the GoCardless Pro API.

    NOTE: this module is generated by Crank (see the file header); keep any
    hand edits in sync with the generator templates.
    """
    # Resource class used to wrap API responses, and the JSON envelope key.
    RESOURCE_CLASS = resources.Payment
    RESOURCE_NAME = 'payments'
    def create(self,params=None, headers=None):
        """Create a payment.
        <a name="mandate_is_inactive"></a>Creates a new payment object.
        This fails with a `mandate_is_inactive` error if the linked
        [mandate](#core-endpoints-mandates) is cancelled or has failed.
        Payments can be created against mandates with status of:
        `pending_customer_approval`, `pending_submission`, `submitted`, and
        `active`.

        Args:
          params (dict, optional): Request body.

        Returns:
          Payment
        """
        path = '/payments'
        if params is not None:
            # Wrap the body under the "payments" envelope key the API expects.
            params = {self._envelope_key(): params}
        try:
            response = self._perform_request('POST', path, params, headers,
                                             retry_failures=True)
        except errors.IdempotentCreationConflictError as err:
            # The idempotency key was already used: either surface the
            # conflict or transparently fetch the resource created earlier.
            if self.raise_on_idempotency_conflict:
                raise err
            return self.get(identity=err.conflicting_resource_id,
                            params=params,
                            headers=headers)
        return self._resource_for(response)
    def list(self,params=None, headers=None):
        """List payments.
        Returns a [cursor-paginated](#api-usage-cursor-pagination) list of your
        payments.

        Args:
          params (dict, optional): Query string parameters.

        Returns:
          ListResponse of Payment instances
        """
        path = '/payments'
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def all(self, params=None):
        # Lazily iterate over every page of list() results.
        if params is None:
            params = {}
        return Paginator(self, params)
    def get(self,identity,params=None, headers=None):
        """Get a single payment.
        Retrieves the details of a single existing payment.

        Args:
          identity (string): Unique identifier, beginning with "PM".
          params (dict, optional): Query string parameters.

        Returns:
          Payment
        """
        path = self._sub_url_params('/payments/:identity', {
            'identity': identity,
        })
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def update(self,identity,params=None, headers=None):
        """Update a payment.
        Updates a payment object. This accepts only the metadata parameter.

        Args:
          identity (string): Unique identifier, beginning with "PM".
          params (dict, optional): Request body.

        Returns:
          Payment
        """
        path = self._sub_url_params('/payments/:identity', {
            'identity': identity,
        })
        if params is not None:
            params = {self._envelope_key(): params}
        response = self._perform_request('PUT', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def cancel(self,identity,params=None, headers=None):
        """Cancel a payment.
        Cancels the payment if it has not already been submitted to the banks.
        Any metadata supplied to this endpoint will be stored on the payment
        cancellation event it causes.
        This will fail with a `cancellation_failed` error unless the payment's
        status is `pending_submission`.

        Args:
          identity (string): Unique identifier, beginning with "PM".
          params (dict, optional): Request body.

        Returns:
          Payment
        """
        path = self._sub_url_params('/payments/:identity/actions/cancel', {
            'identity': identity,
        })
        if params is not None:
            # Action endpoints use a bare "data" envelope rather than the
            # resource name.
            params = {'data': params}
        # State-changing actions are not retried automatically: a retry after
        # a timeout could repeat the action.
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
    def retry(self,identity,params=None, headers=None):
        """Retry a payment.
        <a name="retry_failed"></a>Retries a failed payment if the underlying
        mandate is active. You will receive a `resubmission_requested` webhook,
        but after that retrying the payment follows the same process as its
        initial creation, so you will receive a `submitted` webhook, followed
        by a `confirmed` or `failed` event. Any metadata supplied to this
        endpoint will be stored against the payment submission event it causes.
        This will return a `retry_failed` error if the payment has not failed.
        Payments can be retried up to 3 times.

        Args:
          identity (string): Unique identifier, beginning with "PM".
          params (dict, optional): Request body.

        Returns:
          Payment
        """
        path = self._sub_url_params('/payments/:identity/actions/retry', {
            'identity': identity,
        })
        if params is not None:
            params = {'data': params}
        # Not retried on failure for the same reason as cancel() above.
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
| mit | 06215932d043e90960411da7b380254e | 30.867021 | 79 | 0.566183 | 4.835351 | false | false | false | false |
gocardless/gocardless-pro-python | gocardless_pro/services/subscriptions_service.py | 1 | 9566 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class SubscriptionsService(base_service.BaseService):
    """Service class that provides access to the subscriptions
    endpoints of the GoCardless Pro API.

    NOTE: this module is generated by Crank (see the file header); keep any
    hand edits in sync with the generator templates.
    """
    # Resource class used to wrap API responses, and the JSON envelope key.
    RESOURCE_CLASS = resources.Subscription
    RESOURCE_NAME = 'subscriptions'
    def create(self,params=None, headers=None):
        """Create a subscription.
        Creates a new subscription object

        Args:
          params (dict, optional): Request body.

        Returns:
          Subscription
        """
        path = '/subscriptions'
        if params is not None:
            # Wrap the body under the "subscriptions" envelope key.
            params = {self._envelope_key(): params}
        try:
            response = self._perform_request('POST', path, params, headers,
                                             retry_failures=True)
        except errors.IdempotentCreationConflictError as err:
            # The idempotency key was already used: either surface the
            # conflict or transparently fetch the resource created earlier.
            if self.raise_on_idempotency_conflict:
                raise err
            return self.get(identity=err.conflicting_resource_id,
                            params=params,
                            headers=headers)
        return self._resource_for(response)
    def list(self,params=None, headers=None):
        """List subscriptions.
        Returns a [cursor-paginated](#api-usage-cursor-pagination) list of your
        subscriptions.

        Args:
          params (dict, optional): Query string parameters.

        Returns:
          ListResponse of Subscription instances
        """
        path = '/subscriptions'
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def all(self, params=None):
        # Lazily iterate over every page of list() results.
        if params is None:
            params = {}
        return Paginator(self, params)
    def get(self,identity,params=None, headers=None):
        """Get a single subscription.
        Retrieves the details of a single subscription.

        Args:
          identity (string): Unique identifier, beginning with "SB".
          params (dict, optional): Query string parameters.

        Returns:
          Subscription
        """
        path = self._sub_url_params('/subscriptions/:identity', {
            'identity': identity,
        })
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def update(self,identity,params=None, headers=None):
        """Update a subscription.
        Updates a subscription object.
        This fails with:
        - `validation_failed` if invalid data is provided when attempting to
        update a subscription.
        - `subscription_not_active` if the subscription is no longer active.
        - `subscription_already_ended` if the subscription has taken all
        payments.
        - `mandate_payments_require_approval` if the amount is being changed
        and the mandate requires approval.
        - `number_of_subscription_amendments_exceeded` error if the
        subscription amount has already been changed 10 times.
        - `forbidden` if the amount is being changed, and the subscription was
        created by an app and you are not authenticated as that app, or if the
        subscription was not created by an app and you are authenticated as an
        app
        - `resource_created_by_another_app` if the app fee is being changed,
        and the subscription was created by an app other than the app you are
        authenticated as

        Args:
          identity (string): Unique identifier, beginning with "SB".
          params (dict, optional): Request body.

        Returns:
          Subscription
        """
        path = self._sub_url_params('/subscriptions/:identity', {
            'identity': identity,
        })
        if params is not None:
            params = {self._envelope_key(): params}
        response = self._perform_request('PUT', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def pause(self,identity,params=None, headers=None):
        """Pause a subscription.
        Pause a subscription object.
        No payments will be created until it is resumed.
        This can only be used when a subscription is collecting a fixed number
        of payments (created using `count`),
        when they continue forever (created without `count` or `end_date`) or
        the subscription is already paused for a number of cycles.
        When `pause_cycles` is omitted the subscription is paused until the
        [resume endpoint](#subscriptions-resume-a-subscription) is called.
        If the subscription is collecting a fixed number of payments,
        `end_date` will be set to `null`.
        When paused indefinitely, `upcoming_payments` will be empty.
        When `pause_cycles` is provided the subscription will be paused for the
        number of cycles requested.
        If the subscription is collecting a fixed number of payments,
        `end_date` will be set to a new value.
        When paused for a number of cycles, `upcoming_payments` will still
        contain the upcoming charge dates.
        This fails with:
        - `forbidden` if the subscription was created by an app and you are not
        authenticated as that app, or if the subscription was not created by an
        app and you are authenticated as an app
        - `validation_failed` if invalid data is provided when attempting to
        pause a subscription.
        - `subscription_paused_cannot_update_cycles` if the subscription is
        already paused for a number of cycles and the request provides a value
        for `pause_cycle`.
        - `subscription_cannot_be_paused` if the subscription cannot be paused.
        - `subscription_already_ended` if the subscription has taken all
        payments.
        - `pause_cycles_must_be_greater_than_or_equal_to` if the provided value
        for `pause_cycles` cannot be satisfied.

        Args:
          identity (string): Unique identifier, beginning with "SB".
          params (dict, optional): Request body.

        Returns:
          Subscription
        """
        path = self._sub_url_params('/subscriptions/:identity/actions/pause', {
            'identity': identity,
        })
        if params is not None:
            # Action endpoints use a bare "data" envelope.
            params = {'data': params}
        # State-changing actions are not retried automatically: a retry after
        # a timeout could repeat the action.
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
    def resume(self,identity,params=None, headers=None):
        """Resume a subscription.
        Resume a subscription object.
        Payments will start to be created again based on the subscriptions
        recurrence rules.
        The `charge_date` on the next payment will be the same as the
        subscriptions `earliest_charge_date_after_resume`
        This fails with:
        - `forbidden` if the subscription was created by an app and you are not
        authenticated as that app, or if the subscription was not created by an
        app and you are authenticated as an app
        - `validation_failed` if invalid data is provided when attempting to
        resume a subscription.
        - `subscription_not_paused` if the subscription is not paused.

        Args:
          identity (string): Unique identifier, beginning with "SB".
          params (dict, optional): Request body.

        Returns:
          Subscription
        """
        path = self._sub_url_params('/subscriptions/:identity/actions/resume', {
            'identity': identity,
        })
        if params is not None:
            params = {'data': params}
        # Not retried on failure (see pause()).
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
    def cancel(self,identity,params=None, headers=None):
        """Cancel a subscription.
        Immediately cancels a subscription; no more payments will be created
        under it. Any metadata supplied to this endpoint will be stored on the
        payment cancellation event it causes.
        This will fail with a cancellation_failed error if the subscription is
        already cancelled or finished.

        Args:
          identity (string): Unique identifier, beginning with "SB".
          params (dict, optional): Request body.

        Returns:
          Subscription
        """
        path = self._sub_url_params('/subscriptions/:identity/actions/cancel', {
            'identity': identity,
        })
        if params is not None:
            params = {'data': params}
        # Not retried on failure (see pause()).
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
| mit | 79b49f77f3c54255c9c126438453df0d | 33.164286 | 80 | 0.587706 | 5.032088 | false | false | false | false |
gocardless/gocardless-pro-python | tests/integration/mandate_pdfs_integration_test.py | 1 | 2055 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
import json
import requests
import responses
from nose.tools import (
assert_equal,
assert_is_instance,
assert_is_none,
assert_is_not_none,
assert_not_equal,
assert_raises
)
from gocardless_pro.errors import MalformedResponseError
from gocardless_pro import resources
from gocardless_pro import list_response
from .. import helpers
@responses.activate
def test_mandate_pdfs_create():
    # Stub the recorded API fixture, then check the deserialised resource
    # mirrors the fixture body and that an Idempotency-Key header was sent.
    fixture = helpers.load_fixture('mandate_pdfs')['create']
    helpers.stub_response(fixture)
    response = helpers.client.mandate_pdfs.create(*fixture['url_params'])
    body = fixture['body']['mandate_pdfs']
    assert_is_instance(response, resources.MandatePdf)
    assert_is_not_none(responses.calls[-1].request.headers.get('Idempotency-Key'))
    assert_equal(response.expires_at, body.get('expires_at'))
    assert_equal(response.url, body.get('url'))
@responses.activate
def test_timeout_mandate_pdfs_create_retries():
    # First request times out, the retry succeeds; both attempts must carry
    # the same Idempotency-Key so the server can deduplicate them.
    fixture = helpers.load_fixture('mandate_pdfs')['create']
    with helpers.stub_timeout_then_response(fixture) as rsps:
      response = helpers.client.mandate_pdfs.create(*fixture['url_params'])
      assert_equal(2, len(rsps.calls))
      assert_equal(rsps.calls[0].request.headers.get('Idempotency-Key'),
                   rsps.calls[1].request.headers.get('Idempotency-Key'))
    body = fixture['body']['mandate_pdfs']
    assert_is_instance(response, resources.MandatePdf)
# NOTE(review): unlike the timeout test above this one has no
# @responses.activate decorator; presumably stub_502_then_response manages
# its own responses mock (it yields `rsps`) -- confirm the decorator on the
# timeout test is the redundant one rather than missing here.
def test_502_mandate_pdfs_create_retries():
    # First request returns a 502, the retry succeeds; both attempts must
    # share the same Idempotency-Key.
    fixture = helpers.load_fixture('mandate_pdfs')['create']
    with helpers.stub_502_then_response(fixture) as rsps:
      response = helpers.client.mandate_pdfs.create(*fixture['url_params'])
      assert_equal(2, len(rsps.calls))
      assert_equal(rsps.calls[0].request.headers.get('Idempotency-Key'),
                   rsps.calls[1].request.headers.get('Idempotency-Key'))
    body = fixture['body']['mandate_pdfs']
    assert_is_instance(response, resources.MandatePdf)
| mit | b34ee36741e0897aff51c745d6e0fe3e | 33.25 | 82 | 0.711922 | 3.261905 | false | false | false | false |
gocardless/gocardless-pro-python | gocardless_pro/services/customer_notifications_service.py | 1 | 1612 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class CustomerNotificationsService(base_service.BaseService):
    """Service class that provides access to the customer_notifications
    endpoints of the GoCardless Pro API.

    NOTE: this module is generated by Crank (see the file header); keep any
    hand edits in sync with the generator templates.
    """
    # Resource class used to wrap API responses, and the JSON envelope key.
    RESOURCE_CLASS = resources.CustomerNotification
    RESOURCE_NAME = 'customer_notifications'
    def handle(self,identity,params=None, headers=None):
        """Handle a notification.
        "Handling" a notification means that you have sent the notification
        yourself (and
        don't want GoCardless to send it).
        If the notification has already been actioned, or the deadline to
        notify has passed,
        this endpoint will return an `already_actioned` error and you should
        not take
        further action. This endpoint takes no additional parameters.

        Args:
          identity (string): The id of the notification.
          params (dict, optional): Request body.

        Returns:
          CustomerNotification
        """
        path = self._sub_url_params('/customer_notifications/:identity/actions/handle', {
            'identity': identity,
        })
        if params is not None:
            # Action endpoints use a bare "data" envelope.
            params = {'data': params}
        # State-changing action: never retried automatically.
        response = self._perform_request('POST', path, params, headers,
                                         retry_failures=False)
        return self._resource_for(response)
| mit | cc67de3ed16ed36977012056610b2d88 | 31.24 | 89 | 0.630893 | 4.855422 | false | false | false | false |
gocardless/gocardless-pro-python | gocardless_pro/services/creditors_service.py | 1 | 3406 | # WARNING: Do not edit by hand, this file was generated by Crank:
#
# https://github.com/gocardless/crank
#
from . import base_service
from .. import resources
from ..paginator import Paginator
from .. import errors
class CreditorsService(base_service.BaseService):
    """Service class that provides access to the creditors
    endpoints of the GoCardless Pro API.

    NOTE: this module is generated by Crank (see the file header); keep any
    hand edits in sync with the generator templates.
    """
    # Resource class used to wrap API responses, and the JSON envelope key.
    RESOURCE_CLASS = resources.Creditor
    RESOURCE_NAME = 'creditors'
    def create(self,params=None, headers=None):
        """Create a creditor.
        Creates a new creditor.

        Args:
          params (dict, optional): Request body.

        Returns:
          Creditor
        """
        path = '/creditors'
        if params is not None:
            # Wrap the body under the "creditors" envelope key.
            params = {self._envelope_key(): params}
        try:
            response = self._perform_request('POST', path, params, headers,
                                             retry_failures=True)
        except errors.IdempotentCreationConflictError as err:
            # The idempotency key was already used: either surface the
            # conflict or transparently fetch the resource created earlier.
            if self.raise_on_idempotency_conflict:
                raise err
            return self.get(identity=err.conflicting_resource_id,
                            params=params,
                            headers=headers)
        return self._resource_for(response)
    def list(self,params=None, headers=None):
        """List creditors.
        Returns a [cursor-paginated](#api-usage-cursor-pagination) list of your
        creditors.

        Args:
          params (dict, optional): Query string parameters.

        Returns:
          ListResponse of Creditor instances
        """
        path = '/creditors'
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def all(self, params=None):
        # Lazily iterate over every page of list() results.
        if params is None:
            params = {}
        return Paginator(self, params)
    def get(self,identity,params=None, headers=None):
        """Get a single creditor.
        Retrieves the details of an existing creditor.

        Args:
          identity (string): Unique identifier, beginning with "CR".
          params (dict, optional): Query string parameters.

        Returns:
          Creditor
        """
        path = self._sub_url_params('/creditors/:identity', {
            'identity': identity,
        })
        response = self._perform_request('GET', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
    def update(self,identity,params=None, headers=None):
        """Update a creditor.
        Updates a creditor object. Supports all of the fields supported when
        creating a creditor.

        Args:
          identity (string): Unique identifier, beginning with "CR".
          params (dict, optional): Request body.

        Returns:
          Creditor
        """
        path = self._sub_url_params('/creditors/:identity', {
            'identity': identity,
        })
        if params is not None:
            params = {self._envelope_key(): params}
        response = self._perform_request('PUT', path, params, headers,
                                         retry_failures=True)
        return self._resource_for(response)
| mit | bfc25009b96374eb495756be697df137 | 27.14876 | 79 | 0.551086 | 4.750349 | false | false | false | false |
gocardless/gocardless-pro-python | gocardless_pro/webhooks.py | 1 | 1372 | import json
import hmac
import hashlib
import sys
from gocardless_pro.resources.event import Event
from gocardless_pro.errors import InvalidSignatureError
# Python 3+ does not have the basestring type, so we alias it.
try:
    basestring
# Bug fix: the original bare "except:" also caught SystemExit and
# KeyboardInterrupt; only NameError (undefined basestring) is expected here.
except NameError:
    basestring = str
# Python 3.0 < x < 3.4 does not support handing a mutable bytearray
# to the hmac constructor, so we need to make a record of it ...
SUPPORTS_BYTEARRAY = sys.version_info[0] == 2 or \
    sys.version_info[1] > 3
def parse(body, webhook_secret, signature_header):
    """Parse a signed webhook payload into a list of Event resources.

    Verifies the HMAC-SHA256 signature first; raises InvalidSignatureError
    (via _verify_signature) when the header does not match the body.
    """
    _verify_signature(body, webhook_secret, signature_header)
    events_data = json.loads(to_string(body))
    return [Event(attrs, None) for attrs in events_data['events']]
def _verify_signature(body, key, expected_signature):
    """Raise InvalidSignatureError unless *expected_signature* matches the
    HMAC-SHA256 hex digest of *body* keyed with *key*.

    hmac.compare_digest performs a constant-time comparison, which prevents
    timing attacks against the signature check.
    """
    digest = hmac.new(
        to_bytes(key),
        to_bytes(body),
        hashlib.sha256
    ).hexdigest()
    if not hmac.compare_digest(expected_signature, digest):
        raise InvalidSignatureError()
def to_bytes(string):
    """Coerce *string* to a byte sequence usable by the hmac module.

    Text is UTF-8 encoded; byte input is passed through, except on old
    interpreters where bytearray is unsupported and bytes() is forced.
    """
    is_text = isinstance(string, basestring)
    if SUPPORTS_BYTEARRAY:
        return bytearray(string, 'utf-8') if is_text else string
    return bytes(string, 'utf-8') if is_text else bytes(string)
def to_string(byte_sequence):
    """Decode a bytearray as UTF-8 text; any other value passes through."""
    if not isinstance(byte_sequence, bytearray):
        return byte_sequence
    return byte_sequence.decode("utf-8")
| mit | f20172a2cdb0c549a8df8034f91bebf8 | 25.901961 | 67 | 0.691691 | 3.698113 | false | false | false | false |
zzzeek/mako | mako/ast.py | 9 | 6642 | # mako/ast.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""utilities for analyzing expressions and blocks of Python
code, as well as generating Python from AST nodes"""
import re
from mako import exceptions
from mako import pyparser
class PythonCode:
    """represents information about a string containing Python code

    :param code: either a string of Python source or an already-parsed AST.
    :param exception_kwargs: passed through to pyparser for error reporting.

    After construction, ``declared_identifiers`` and
    ``undeclared_identifiers`` describe the names the code assigns and the
    names it references before assignment, respectively.
    """
    def __init__(self, code, **exception_kwargs):
        self.code = code
        # represents all identifiers which are assigned to at some point in
        # the code
        self.declared_identifiers = set()
        # represents all identifiers which are referenced before their
        # assignment, if any
        self.undeclared_identifiers = set()
        # note that an identifier can be in both the undeclared and declared
        # lists.
        # using AST to parse instead of using code.co_varnames,
        # code.co_names has several advantages:
        # - we can locate an identifier as "undeclared" even if
        # its declared later in the same block of code
        # - AST is less likely to break with version changes
        # (for example, the behavior of co_names changed a little bit
        # in python version 2.5)
        if isinstance(code, str):
            # lstrip() tolerates leading whitespace in template expressions.
            expr = pyparser.parse(code.lstrip(), "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor populates the two identifier sets above via `self`.
        f = pyparser.FindIdentifiers(self, **exception_kwargs)
        f.visit(expr)
class ArgumentList:
    """parses a fragment of code as a comma-separated list of expressions

    Each expression is analyzed as a PythonCode and contributes to the
    aggregate ``declared_identifiers`` / ``undeclared_identifiers`` sets.
    """
    def __init__(self, code, **exception_kwargs):
        self.codeargs = []
        self.args = []
        self.declared_identifiers = set()
        self.undeclared_identifiers = set()
        if isinstance(code, str):
            if re.match(r"\S", code) and not re.match(r",\s*$", code):
                # if theres text and no trailing comma, insure its parsed
                # as a tuple by adding a trailing comma
                code += ","
            expr = pyparser.parse(code, "exec", **exception_kwargs)
        else:
            expr = code
        # The visitor splits the tuple and records each element via `self`.
        f = pyparser.FindTuple(self, PythonCode, **exception_kwargs)
        f.visit(expr)
class PythonFragment(PythonCode):
    """extends PythonCode to provide identifier lookups in partial control
    statements

    e.g.::

        for x in 5:
        elif y==9:
        except (MyException, e):

    The fragment is completed into syntactically valid Python (by appending
    ``pass`` bodies and, where needed, a synthetic opening statement) so the
    PythonCode identifier analysis can run on it.
    """
    def __init__(self, code, **exception_kwargs):
        # Split the fragment into its leading keyword, the expression part,
        # and an optional trailing comment.
        m = re.match(r"^(\w+)(?:\s+(.*?))?:\s*(#|$)", code.strip(), re.S)
        if not m:
            raise exceptions.CompileException(
                "Fragment '%s' is not a partial control statement" % code,
                **exception_kwargs,
            )
        if m.group(3):
            # Strip a trailing comment so it doesn't confuse the parser.
            code = code[: m.start(3)]
        (keyword, expr) = m.group(1, 2)
        # Complete each kind of partial statement into parseable code.
        if keyword in ["for", "if", "while"]:
            code = code + "pass"
        elif keyword == "try":
            code = code + "pass\nexcept:pass"
        elif keyword in ["elif", "else"]:
            # elif/else need a preceding `if` to be valid.
            code = "if False:pass\n" + code + "pass"
        elif keyword == "except":
            # except needs a preceding `try` to be valid.
            code = "try:pass\n" + code + "pass"
        elif keyword == "with":
            code = code + "pass"
        else:
            raise exceptions.CompileException(
                "Unsupported control keyword: '%s'" % keyword,
                **exception_kwargs,
            )
        super().__init__(code, **exception_kwargs)
class FunctionDecl:
    """function declaration

    Parses a ``def`` statement and exposes its name and argument structure
    (``funcname``, ``argnames``, ``kwargnames``, ``defaults``,
    ``kwdefaults``, ``varargs``, ``kwargs``), populated by the
    pyparser.ParseFunc visitor.
    """
    def __init__(self, code, allow_kwargs=True, **exception_kwargs):
        self.code = code
        expr = pyparser.parse(code, "exec", **exception_kwargs)
        # The visitor sets the attributes described above via `self`.
        f = pyparser.ParseFunc(self, **exception_kwargs)
        f.visit(expr)
        if not hasattr(self, "funcname"):
            raise exceptions.CompileException(
                "Code '%s' is not a function declaration" % code,
                **exception_kwargs,
            )
        if not allow_kwargs and self.kwargs:
            raise exceptions.CompileException(
                "'**%s' keyword argument not allowed here"
                % self.kwargnames[-1],
                **exception_kwargs,
            )
    def get_argument_expressions(self, as_call=False):
        """Return the argument declarations of this FunctionDecl as a printable
        list.

        By default the return value is appropriate for writing in a ``def``;
        set `as_call` to true to build arguments to be passed to the function
        instead (assuming locals with the same names as the arguments exist).
        """
        namedecls = []
        # Build in reverse order, since defaults and slurpy args come last
        argnames = self.argnames[::-1]
        kwargnames = self.kwargnames[::-1]
        defaults = self.defaults[::-1]
        kwdefaults = self.kwdefaults[::-1]
        # Named arguments
        if self.kwargs:
            namedecls.append("**" + kwargnames.pop(0))
        for name in kwargnames:
            # Keyword-only arguments must always be used by name, so even if
            # this is a call, print out `foo=foo`
            if as_call:
                namedecls.append("%s=%s" % (name, name))
            elif kwdefaults:
                default = kwdefaults.pop(0)
                if default is None:
                    # The AST always gives kwargs a default, since you can do
                    # `def foo(*, a=1, b, c=3)`
                    namedecls.append(name)
                else:
                    namedecls.append(
                        "%s=%s"
                        % (name, pyparser.ExpressionGenerator(default).value())
                    )
            else:
                namedecls.append(name)
        # Positional arguments
        if self.varargs:
            namedecls.append("*" + argnames.pop(0))
        for name in argnames:
            if as_call or not defaults:
                namedecls.append(name)
            else:
                default = defaults.pop(0)
                namedecls.append(
                    "%s=%s"
                    % (name, pyparser.ExpressionGenerator(default).value())
                )
        # Undo the reverse-order construction above.
        namedecls.reverse()
        return namedecls
    @property
    def allargnames(self):
        """All argument names, positional followed by keyword-only."""
        return tuple(self.argnames) + tuple(self.kwargnames)
class FunctionArgs(FunctionDecl):
    """Parses just the argument portion of a function declaration.

    The fragment is wrapped in a dummy ``def`` so that the full
    FunctionDecl parsing machinery can be reused unchanged.
    """

    def __init__(self, code, **kwargs):
        wrapped = "def ANON(%s):pass" % code
        super().__init__(wrapped, **kwargs)
| mit | 7fa59769ba5d294f10b225fb5f4901da | 31.881188 | 79 | 0.558416 | 4.338341 | false | false | false | false |
zzzeek/mako | examples/bench/basic.py | 2 | 6593 | # basic.py - basic benchmarks adapted from Genshi
# Copyright (C) 2006 Edgewall Software
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
# IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import sys
import timeit
from io import StringIO
# Engine benchmark functions exposed to the benchmark driver.
__all__ = [
    "mako",
    "mako_inheritance",
    "jinja2",
    "jinja2_inheritance",
    "cheetah",
    "django",
    "myghty",
    "genshi",
    "kid",
]
# Templates content and constants
# Shared context rendered by every engine so timings are comparable.
TITLE = "Just a test"
USER = "joe"
ITEMS = ["Number %d" % num for num in range(1, 15)]
def genshi(dirname, verbose=False):
    """Return a zero-argument callable that renders the Genshi template.

    Template loading happens once here; only render() is timed by callers.
    """
    from genshi.template import TemplateLoader
    loader = TemplateLoader([dirname], auto_reload=False)
    template = loader.load("template.html")
    def render():
        data = dict(title=TITLE, user=USER, items=ITEMS)
        return template.generate(**data).render("xhtml")
    if verbose:
        print(render())
    return render
def myghty(dirname, verbose=False):
    """Return a zero-argument callable that renders the Myghty template."""
    from myghty import interp
    interpreter = interp.Interpreter(component_root=dirname)
    def render():
        data = dict(title=TITLE, user=USER, items=ITEMS)
        # Myghty writes into a buffer rather than returning a string.
        buffer = StringIO()
        interpreter.execute(
            "template.myt", request_args=data, out_buffer=buffer
        )
        return buffer.getvalue()
    if verbose:
        print(render())
    return render
def mako(dirname, verbose=False):
    """Build a Mako render callable for the benchmark template."""
    # The previously-imported mako.template.Template was unused; the
    # lookup loads and compiles templates itself.
    from mako.lookup import TemplateLookup

    lookup = TemplateLookup(directories=[dirname], filesystem_checks=False)
    template = lookup.get_template("template.html")

    def render():
        return template.render(title=TITLE, user=USER, list_items=ITEMS)

    if verbose:
        # Show the generated module source alongside the rendered output.
        print(template.code + " " + render())
    return render


# The inheritance benchmark reuses the same setup; only the template differs.
mako_inheritance = mako
def jinja2(dirname, verbose=False):
    """Build a Jinja2 render callable for the benchmark template."""
    from jinja2 import Environment, FileSystemLoader

    tmpl = Environment(loader=FileSystemLoader(dirname)).get_template(
        "template.html"
    )

    def render():
        return tmpl.render(title=TITLE, user=USER, list_items=ITEMS)

    if verbose:
        print(render())
    return render


# The inheritance benchmark reuses the same setup; only the template differs.
jinja2_inheritance = jinja2
def cheetah(dirname, verbose=False):
    """Build a Cheetah render callable for the benchmark template."""
    from Cheetah.Template import Template

    tmpl = Template(file=os.path.join(dirname, "template.tmpl"))

    def render():
        # Cheetah exposes template variables as instance attributes.
        tmpl.__dict__.update(
            {"title": TITLE, "user": USER, "list_items": ITEMS}
        )
        return tmpl.respond()

    if verbose:
        print(dir(tmpl))
        print(tmpl.generatedModuleCode())
        print(render())
    return render
def django(dirname, verbose=False):
    """Build a Django render callable for the benchmark template."""
    from django.conf import settings

    # Settings must be configured before the template machinery is imported.
    settings.configure(TEMPLATE_DIRS=[os.path.join(dirname, "templates")])
    from django import template, templatetags
    from django.template import loader

    templatetags.__path__.append(os.path.join(dirname, "templatetags"))
    tmpl = loader.get_template("template.html")

    def render():
        ctx = template.Context(
            {"title": TITLE, "user": USER, "items": ITEMS}
        )
        return tmpl.render(ctx)

    if verbose:
        print(render())
    return render
def kid(dirname, verbose=False):
    """Build a Kid render callable for the benchmark template."""
    import kid

    kid.path = kid.TemplatePath([dirname])
    # Instantiate once up front so template compilation is not timed.
    warm = kid.Template(file="template.kid")

    def render():
        return kid.Template(
            file="template.kid", title=TITLE, user=USER, items=ITEMS
        ).serialize(output="xhtml")

    if verbose:
        print(render())
    return render
def run(engines, number=2000, verbose=False):
    """Time each engine's render callable and print per-render milliseconds.

    Engine setup (template compilation) happens in the timeit setup
    statement, so only render() itself is measured.
    """
    basepath = os.path.abspath(os.path.dirname(__file__))
    divider = "--------------------------------------------------------"
    for engine in engines:
        dirname = os.path.join(basepath, engine)
        if verbose:
            print("%s:" % engine.capitalize())
            print(divider)
        else:
            sys.stdout.write("%s:" % engine.capitalize())
        timer = timeit.Timer(
            setup='from __main__ import %s; render = %s(r"%s", %s)'
            % (engine, engine, dirname, verbose),
            stmt="render()",
        )
        per_call = timer.timeit(number=number) / number
        if verbose:
            print(divider)
        print("%.2f ms" % (1000 * per_call))
        if verbose:
            print(divider)
if __name__ == "__main__":
    # Positional args select engines; anything starting with "-" is a flag.
    engines = [arg for arg in sys.argv[1:] if arg[0] != "-"]
    if not engines:
        engines = __all__
    verbose = "-v" in sys.argv
    if "-p" in sys.argv:
        # Profiled run.  hotshot only exists on Python 2; fall back to
        # cProfile when it is not importable.
        try:
            import hotshot, hotshot.stats
            prof = hotshot.Profile("template.prof")
            benchtime = prof.runcall(run, engines, number=100, verbose=verbose)
            stats = hotshot.stats.load("template.prof")
        except ImportError:
            import cProfile, pstats
            stmt = "run(%r, number=%r, verbose=%r)" % (engines, 1000, verbose)
            cProfile.runctx(stmt, globals(), {}, "template.prof")
            stats = pstats.Stats("template.prof")
        stats.strip_dirs()
        stats.sort_stats("time", "calls")
        stats.print_stats()
    else:
        run(engines, verbose=verbose)
| mit | 21d7a4672e44ca82444f28862af23a81 | 28.433036 | 79 | 0.629455 | 4.120625 | false | false | false | false |
zzzeek/mako | mako/template.py | 9 | 23858 | # mako/template.py
# Copyright 2006-2022 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Provides the Template class, a facade for parsing, generating and executing
template strings, as well as template runtime operations."""
import json
import os
import re
import shutil
import stat
import tempfile
import types
import weakref
from mako import cache
from mako import codegen
from mako import compat
from mako import exceptions
from mako import runtime
from mako import util
from mako.lexer import Lexer
class Template:
r"""Represents a compiled template.
:class:`.Template` includes a reference to the original
template source (via the :attr:`.source` attribute)
as well as the source code of the
generated Python module (i.e. the :attr:`.code` attribute),
as well as a reference to an actual Python module.
:class:`.Template` is constructed using either a literal string
representing the template text, or a filename representing a filesystem
path to a source file.
:param text: textual template source. This argument is mutually
exclusive versus the ``filename`` parameter.
:param filename: filename of the source template. This argument is
mutually exclusive versus the ``text`` parameter.
:param buffer_filters: string list of filters to be applied
to the output of ``%def``\ s which are buffered, cached, or otherwise
filtered, after all filters
defined with the ``%def`` itself have been applied. Allows the
creation of default expression filters that let the output
of return-valued ``%def``\ s "opt out" of that filtering via
passing special attributes or objects.
:param cache_args: Dictionary of cache configuration arguments that
will be passed to the :class:`.CacheImpl`. See :ref:`caching_toplevel`.
:param cache_dir:
.. deprecated:: 0.6
Use the ``'dir'`` argument in the ``cache_args`` dictionary.
See :ref:`caching_toplevel`.
:param cache_enabled: Boolean flag which enables caching of this
template. See :ref:`caching_toplevel`.
:param cache_impl: String name of a :class:`.CacheImpl` caching
implementation to use. Defaults to ``'beaker'``.
:param cache_type:
.. deprecated:: 0.6
Use the ``'type'`` argument in the ``cache_args`` dictionary.
See :ref:`caching_toplevel`.
:param cache_url:
.. deprecated:: 0.6
Use the ``'url'`` argument in the ``cache_args`` dictionary.
See :ref:`caching_toplevel`.
:param default_filters: List of string filter names that will
be applied to all expressions. See :ref:`filtering_default_filters`.
:param enable_loop: When ``True``, enable the ``loop`` context variable.
This can be set to ``False`` to support templates that may
be making usage of the name "``loop``". Individual templates can
re-enable the "loop" context by placing the directive
``enable_loop="True"`` inside the ``<%page>`` tag -- see
:ref:`migrating_loop`.
:param encoding_errors: Error parameter passed to ``encode()`` when
string encoding is performed. See :ref:`usage_unicode`.
:param error_handler: Python callable which is called whenever
compile or runtime exceptions occur. The callable is passed
the current context as well as the exception. If the
callable returns ``True``, the exception is considered to
be handled, else it is re-raised after the function
completes. Is used to provide custom error-rendering
functions.
.. seealso::
:paramref:`.Template.include_error_handler` - include-specific
error handler function
:param format_exceptions: if ``True``, exceptions which occur during
the render phase of this template will be caught and
formatted into an HTML error page, which then becomes the
rendered result of the :meth:`.render` call. Otherwise,
runtime exceptions are propagated outwards.
:param imports: String list of Python statements, typically individual
"import" lines, which will be placed into the module level
preamble of all generated Python modules. See the example
in :ref:`filtering_default_filters`.
:param future_imports: String list of names to import from `__future__`.
These will be concatenated into a comma-separated string and inserted
into the beginning of the template, e.g. ``futures_imports=['FOO',
'BAR']`` results in ``from __future__ import FOO, BAR``. If you're
interested in using features like the new division operator, you must
use future_imports to convey that to the renderer, as otherwise the
import will not appear as the first executed statement in the generated
code and will therefore not have the desired effect.
:param include_error_handler: An error handler that runs when this template
is included within another one via the ``<%include>`` tag, and raises an
error. Compare to the :paramref:`.Template.error_handler` option.
.. versionadded:: 1.0.6
.. seealso::
:paramref:`.Template.error_handler` - top-level error handler function
:param input_encoding: Encoding of the template's source code. Can
be used in lieu of the coding comment. See
:ref:`usage_unicode` as well as :ref:`unicode_toplevel` for
details on source encoding.
:param lookup: a :class:`.TemplateLookup` instance that will be used
for all file lookups via the ``<%namespace>``,
``<%include>``, and ``<%inherit>`` tags. See
:ref:`usage_templatelookup`.
:param module_directory: Filesystem location where generated
Python module files will be placed.
:param module_filename: Overrides the filename of the generated
Python module file. For advanced usage only.
:param module_writer: A callable which overrides how the Python
module is written entirely. The callable is passed the
encoded source content of the module and the destination
path to be written to. The default behavior of module writing
uses a tempfile in conjunction with a file move in order
to make the operation atomic. So a user-defined module
writing function that mimics the default behavior would be:
.. sourcecode:: python
import tempfile
import os
import shutil
def module_writer(source, outputpath):
(dest, name) = \\
tempfile.mkstemp(
dir=os.path.dirname(outputpath)
)
os.write(dest, source)
os.close(dest)
shutil.move(name, outputpath)
from mako.template import Template
mytemplate = Template(
filename="index.html",
module_directory="/path/to/modules",
module_writer=module_writer
)
The function is provided for unusual configurations where
certain platform-specific permissions or other special
steps are needed.
:param output_encoding: The encoding to use when :meth:`.render`
is called.
See :ref:`usage_unicode` as well as :ref:`unicode_toplevel`.
:param preprocessor: Python callable which will be passed
the full template source before it is parsed. The return
result of the callable will be used as the template source
code.
:param lexer_cls: A :class:`.Lexer` class used to parse
the template. The :class:`.Lexer` class is used by
default.
.. versionadded:: 0.7.4
:param strict_undefined: Replaces the automatic usage of
``UNDEFINED`` for any undeclared variables not located in
the :class:`.Context` with an immediate raise of
``NameError``. The advantage is immediate reporting of
missing variables which include the name.
.. versionadded:: 0.3.6
:param uri: string URI or other identifier for this template.
If not provided, the ``uri`` is generated from the filesystem
path, or from the in-memory identity of a non-file-based
template. The primary usage of the ``uri`` is to provide a key
within :class:`.TemplateLookup`, as well as to generate the
file path of the generated Python module file, if
``module_directory`` is specified.
"""
lexer_cls = Lexer
def __init__(
self,
text=None,
filename=None,
uri=None,
format_exceptions=False,
error_handler=None,
lookup=None,
output_encoding=None,
encoding_errors="strict",
module_directory=None,
cache_args=None,
cache_impl="beaker",
cache_enabled=True,
cache_type=None,
cache_dir=None,
cache_url=None,
module_filename=None,
input_encoding=None,
module_writer=None,
default_filters=None,
buffer_filters=(),
strict_undefined=False,
imports=None,
future_imports=None,
enable_loop=True,
preprocessor=None,
lexer_cls=None,
include_error_handler=None,
):
if uri:
self.module_id = re.sub(r"\W", "_", uri)
self.uri = uri
elif filename:
self.module_id = re.sub(r"\W", "_", filename)
drive, path = os.path.splitdrive(filename)
path = os.path.normpath(path).replace(os.path.sep, "/")
self.uri = path
else:
self.module_id = "memory:" + hex(id(self))
self.uri = self.module_id
u_norm = self.uri
if u_norm.startswith("/"):
u_norm = u_norm[1:]
u_norm = os.path.normpath(u_norm)
if u_norm.startswith(".."):
raise exceptions.TemplateLookupException(
'Template uri "%s" is invalid - '
"it cannot be relative outside "
"of the root path." % self.uri
)
self.input_encoding = input_encoding
self.output_encoding = output_encoding
self.encoding_errors = encoding_errors
self.enable_loop = enable_loop
self.strict_undefined = strict_undefined
self.module_writer = module_writer
if default_filters is None:
self.default_filters = ["str"]
else:
self.default_filters = default_filters
self.buffer_filters = buffer_filters
self.imports = imports
self.future_imports = future_imports
self.preprocessor = preprocessor
if lexer_cls is not None:
self.lexer_cls = lexer_cls
# if plain text, compile code in memory only
if text is not None:
(code, module) = _compile_text(self, text, filename)
self._code = code
self._source = text
ModuleInfo(module, None, self, filename, code, text, uri)
elif filename is not None:
# if template filename and a module directory, load
# a filesystem-based module file, generating if needed
if module_filename is not None:
path = module_filename
elif module_directory is not None:
path = os.path.abspath(
os.path.join(
os.path.normpath(module_directory), u_norm + ".py"
)
)
else:
path = None
module = self._compile_from_file(path, filename)
else:
raise exceptions.RuntimeException(
"Template requires text or filename"
)
self.module = module
self.filename = filename
self.callable_ = self.module.render_body
self.format_exceptions = format_exceptions
self.error_handler = error_handler
self.include_error_handler = include_error_handler
self.lookup = lookup
self.module_directory = module_directory
self._setup_cache_args(
cache_impl,
cache_enabled,
cache_args,
cache_type,
cache_dir,
cache_url,
)
@util.memoized_property
def reserved_names(self):
if self.enable_loop:
return codegen.RESERVED_NAMES
else:
return codegen.RESERVED_NAMES.difference(["loop"])
def _setup_cache_args(
self,
cache_impl,
cache_enabled,
cache_args,
cache_type,
cache_dir,
cache_url,
):
self.cache_impl = cache_impl
self.cache_enabled = cache_enabled
self.cache_args = cache_args or {}
# transfer deprecated cache_* args
if cache_type:
self.cache_args["type"] = cache_type
if cache_dir:
self.cache_args["dir"] = cache_dir
if cache_url:
self.cache_args["url"] = cache_url
    def _compile_from_file(self, path, filename):
        # Compile the template at ``filename``.  When ``path`` is given the
        # generated Python module is cached on disk there; otherwise the
        # module is compiled in memory only.
        if path is not None:
            util.verify_directory(os.path.dirname(path))
            filemtime = os.stat(filename)[stat.ST_MTIME]
            # Regenerate the module file when it is missing or older than
            # the template source.
            if (
                not os.path.exists(path)
                or os.stat(path)[stat.ST_MTIME] < filemtime
            ):
                data = util.read_file(filename)
                _compile_module_file(
                    self, data, filename, path, self.module_writer
                )
            module = compat.load_module(self.module_id, path)
            # A mismatched magic number means the module was generated by
            # an incompatible Mako version; force a recompile.
            if module._magic_number != codegen.MAGIC_NUMBER:
                data = util.read_file(filename)
                _compile_module_file(
                    self, data, filename, path, self.module_writer
                )
                module = compat.load_module(self.module_id, path)
            ModuleInfo(module, path, self, filename, None, None, None)
        else:
            # template filename and no module directory, compile code
            # in memory
            data = util.read_file(filename)
            code, module = _compile_text(self, data, filename)
            self._source = None
            self._code = code
            ModuleInfo(module, None, self, filename, code, None, None)
        return module
    @property
    def source(self):
        """Return the template source code for this :class:`.Template`."""
        return _get_module_info_from_callable(self.callable_).source

    @property
    def code(self):
        """Return the module source code for this :class:`.Template`."""
        return _get_module_info_from_callable(self.callable_).code

    @util.memoized_property
    def cache(self):
        """Memoized :class:`.Cache` facade for this template."""
        return cache.Cache(self)

    @property
    def cache_dir(self):
        """Deprecated accessor for ``cache_args['dir']``."""
        return self.cache_args["dir"]

    @property
    def cache_url(self):
        """Deprecated accessor for ``cache_args['url']``."""
        return self.cache_args["url"]

    @property
    def cache_type(self):
        """Deprecated accessor for ``cache_args['type']``."""
        return self.cache_args["type"]
    def render(self, *args, **data):
        """Render the output of this template as a string.

        If the template specifies an output encoding, the string
        will be encoded accordingly, else the output is raw (raw
        output uses `StringIO` and can't handle multibyte
        characters). A :class:`.Context` object is created corresponding
        to the given data. Arguments that are explicitly declared
        by this template's internal rendering method are also
        pulled from the given ``*args``, ``**data`` members.
        """
        return runtime._render(self, self.callable_, args, data)

    def render_unicode(self, *args, **data):
        """Render the output of this template as a unicode object."""
        return runtime._render(
            self, self.callable_, args, data, as_unicode=True
        )

    def render_context(self, context, *args, **kwargs):
        """Render this :class:`.Template` with the given context.

        The data is written to the context's buffer.
        """
        # Associate the context with this template unless a template was
        # already attached (e.g. when rendering an inherited template).
        if getattr(context, "_with_template", None) is None:
            context._set_with_template(self)
        runtime._render_context(self, self.callable_, context, *args, **kwargs)
def has_def(self, name):
return hasattr(self.module, "render_%s" % name)
def get_def(self, name):
"""Return a def of this template as a :class:`.DefTemplate`."""
return DefTemplate(self, getattr(self.module, "render_%s" % name))
def list_defs(self):
"""return a list of defs in the template.
.. versionadded:: 1.0.4
"""
return [i[7:] for i in dir(self.module) if i[:7] == "render_"]
def _get_def_callable(self, name):
return getattr(self.module, "render_%s" % name)
@property
def last_modified(self):
return self.module._modified_time
class ModuleTemplate(Template):
    """A Template which is constructed given an existing Python module.

    e.g.::

        t = Template("this is a template")
        f = file("mymodule.py", "w")
        f.write(t.code)
        f.close()

        import mymodule

        t = ModuleTemplate(mymodule)
        print(t.render())

    """

    def __init__(
        self,
        module,
        module_filename=None,
        template=None,
        template_filename=None,
        module_source=None,
        template_source=None,
        output_encoding=None,
        encoding_errors="strict",
        format_exceptions=False,
        error_handler=None,
        lookup=None,
        cache_args=None,
        cache_impl="beaker",
        cache_enabled=True,
        cache_type=None,
        cache_dir=None,
        cache_url=None,
        include_error_handler=None,
    ):
        # Template metadata is recovered from attributes written into the
        # generated module (_template_uri, _source_encoding, _enable_loop).
        self.module_id = re.sub(r"\W", "_", module._template_uri)
        self.uri = module._template_uri
        self.input_encoding = module._source_encoding
        self.output_encoding = output_encoding
        self.encoding_errors = encoding_errors
        self.enable_loop = module._enable_loop
        self.module = module
        self.filename = template_filename
        # Register the module so source/code reverse lookups work.
        ModuleInfo(
            module,
            module_filename,
            self,
            template_filename,
            module_source,
            template_source,
            module._template_uri,
        )
        self.callable_ = self.module.render_body
        self.format_exceptions = format_exceptions
        self.error_handler = error_handler
        self.include_error_handler = include_error_handler
        self.lookup = lookup
        self._setup_cache_args(
            cache_impl,
            cache_enabled,
            cache_args,
            cache_type,
            cache_dir,
            cache_url,
        )
class DefTemplate(Template):
    """A :class:`.Template` which represents a callable def in a parent
    template."""

    def __init__(self, parent, callable_):
        self.parent = parent
        self.callable_ = callable_
        self.module = parent.module
        # Mirror the parent's rendering configuration so a def renders
        # with the same behavior as its enclosing template.
        for attr in (
            "output_encoding",
            "encoding_errors",
            "format_exceptions",
            "error_handler",
            "include_error_handler",
            "enable_loop",
            "lookup",
        ):
            setattr(self, attr, getattr(parent, attr))

    def get_def(self, name):
        """Delegate def lookup to the parent template."""
        return self.parent.get_def(name)
class ModuleInfo:
    """Stores information about a module currently loaded into
    memory, provides reverse lookups of template source, module
    source code based on a module's identifier.
    """

    # Registry of live ModuleInfo objects, keyed by module name and (when
    # available) module filename.  Weak values let templates be collected.
    _modules = weakref.WeakValueDictionary()

    def __init__(
        self,
        module,
        module_filename,
        template,
        template_filename,
        module_source,
        template_source,
        template_uri,
    ):
        self.module = module
        self.module_filename = module_filename
        self.template_filename = template_filename
        self.module_source = module_source
        self.template_source = template_source
        self.template_uri = template_uri
        # Register under the module's name and mark the owning template.
        self._modules[module.__name__] = template._mmarker = self
        if module_filename:
            self._modules[module_filename] = self

    @classmethod
    def get_module_source_metadata(cls, module_source, full_line_map=False):
        """Extract the JSON metadata blob embedded in generated module
        source, normalizing line-map keys and values to ints."""
        match = re.search(
            r"__M_BEGIN_METADATA(.+?)__M_END_METADATA", module_source, re.S
        )
        metadata = json.loads(match.group(1))
        metadata["line_map"] = {
            int(mod): int(tmpl) for mod, tmpl in metadata["line_map"].items()
        }
        if full_line_map:
            line_map = metadata["line_map"]
            expanded = metadata["full_line_map"] = []
            current = 1
            # Expand the sparse map into one template line per module line.
            for lineno in range(1, max(line_map)):
                current = line_map.get(lineno, current)
                expanded.append(current)
        return metadata

    @property
    def code(self):
        """The generated module source, read from disk if not in memory."""
        if self.module_source is not None:
            return self.module_source
        return util.read_python_file(self.module_filename)

    @property
    def source(self):
        """The template source, decoded per the module's source encoding."""
        if self.template_source is None:
            data = util.read_file(self.template_filename)
            if self.module._source_encoding:
                return data.decode(self.module._source_encoding)
            return data
        if self.module._source_encoding and not isinstance(
            self.template_source, str
        ):
            return self.template_source.decode(self.module._source_encoding)
        return self.template_source
def _compile(template, text, filename, generate_magic_comment):
    # Parse ``text`` and generate Python module source for ``template``.
    # Returns (source, lexer); the lexer is returned so callers can reuse
    # its detected encoding.
    lexer = template.lexer_cls(
        text,
        filename,
        input_encoding=template.input_encoding,
        preprocessor=template.preprocessor,
    )
    node = lexer.parse()
    source = codegen.compile(
        node,
        template.uri,
        filename,
        default_filters=template.default_filters,
        buffer_filters=template.buffer_filters,
        imports=template.imports,
        future_imports=template.future_imports,
        source_encoding=lexer.encoding,
        generate_magic_comment=generate_magic_comment,
        strict_undefined=template.strict_undefined,
        enable_loop=template.enable_loop,
        reserved_names=template.reserved_names,
    )
    return source, lexer
def _compile_text(template, text, filename):
    """Compile template ``text`` into an in-memory Python module.

    Returns ``(source, module)``: the generated module source and the
    executed module object.
    """
    source, lexer = _compile(
        template, text, filename, generate_magic_comment=False
    )
    module_id = template.module_id
    module = types.ModuleType(module_id)
    code_obj = compile(source, module_id, "exec")
    exec(code_obj, module.__dict__, module.__dict__)
    return (source, module)
def _compile_module_file(template, text, filename, outputpath, module_writer):
    """Compile template ``text`` and write the generated module to disk."""
    source, lexer = _compile(
        template, text, filename, generate_magic_comment=True
    )
    if isinstance(source, str):
        source = source.encode(lexer.encoding or "ascii")
    if module_writer:
        module_writer(source, outputpath)
        return
    # Write to a tempfile beside the destination and move it into place;
    # staying on one filesystem avoids synchronization issues.
    handle, tmpname = tempfile.mkstemp(dir=os.path.dirname(outputpath))
    os.write(handle, source)
    os.close(handle)
    shutil.move(tmpname, outputpath)
def _get_module_info_from_callable(callable_):
    # Resolve the ModuleInfo for the module that defines ``callable_``.
    return _get_module_info(callable_.__globals__["__name__"])


def _get_module_info(filename):
    # ``filename`` is either a module name or a module filename; both are
    # registered as keys in ModuleInfo._modules.
    return ModuleInfo._modules[filename]
| mit | b3c92f9aaddc870ea33f7f23902987fa | 32.321229 | 79 | 0.615601 | 4.335453 | false | false | false | false |
alisaifee/limits | limits/aio/storage/base.py | 1 | 2941 | from abc import ABC, abstractmethod
from deprecated.sphinx import versionadded
from limits.storage.registry import StorageRegistry
from limits.typing import List, Optional, Tuple, Union
from limits.util import LazyDependency
@versionadded(version="2.1")
class Storage(LazyDependency, metaclass=StorageRegistry):
    """
    Base class to extend when implementing an async storage backend.
    """

    STORAGE_SCHEME: Optional[List[str]]
    """The storage schemes to register against this implementation"""

    def __init__(
        self, uri: Optional[str] = None, **options: Union[float, str, bool]
    ) -> None:
        # The base class accepts (and ignores) the storage URI and options;
        # concrete backends parse them as needed.
        super().__init__()

    @abstractmethod
    async def incr(
        self, key: str, expiry: int, elastic_expiry: bool = False, amount: int = 1
    ) -> int:
        """
        increments the counter for a given rate limit key

        :param key: the key to increment
        :param expiry: amount in seconds for the key to expire in
        :param elastic_expiry: whether to keep extending the rate limit
         window every hit.
        :param amount: the number to increment by
        """
        raise NotImplementedError

    @abstractmethod
    async def get(self, key: str) -> int:
        """
        :param key: the key to get the counter value for
        """
        raise NotImplementedError

    @abstractmethod
    async def get_expiry(self, key: str) -> int:
        """
        :param key: the key to get the expiry for
        """
        raise NotImplementedError

    @abstractmethod
    async def check(self) -> bool:
        """
        check if storage is healthy
        """
        raise NotImplementedError

    @abstractmethod
    async def reset(self) -> Optional[int]:
        """
        reset storage to clear limits
        """
        raise NotImplementedError

    @abstractmethod
    async def clear(self, key: str) -> None:
        """
        resets the rate limit key

        :param key: the key to clear rate limits for
        """
        raise NotImplementedError
class MovingWindowSupport(ABC):
    """
    Abstract base for storages that intend to support
    the moving window strategy
    """

    # NOTE(review): these methods raise NotImplementedError but are not
    # decorated with @abstractmethod, so subclasses are not forced to
    # override them at class-creation time -- confirm whether that is
    # intentional.
    async def acquire_entry(
        self, key: str, limit: int, expiry: int, amount: int = 1
    ) -> bool:
        """
        :param key: rate limit key to acquire an entry in
        :param limit: amount of entries allowed
        :param expiry: expiry of the entry
        :param amount: the number of entries to acquire
        """
        raise NotImplementedError

    async def get_moving_window(
        self, key: str, limit: int, expiry: int
    ) -> Tuple[int, int]:
        """
        returns the starting point and the number of entries in the moving
        window

        :param key: rate limit key
        :param expiry: expiry of entry
        :return: (start of window, number of acquired entries)
        """
        raise NotImplementedError
| mit | a32fa10abb64284043f00dcf945a7b71 | 27.009524 | 82 | 0.616117 | 4.69059 | false | false | false | false |
alisaifee/limits | tests/conftest.py | 1 | 7329 | import os
import platform
import socket
import time
import pymemcache
import pymemcache.client
import pymongo
import pytest
import redis
import redis.sentinel
def check_redis_cluster_ready(host, port):
    """True once the redis cluster at host:port reports state "ok"."""
    try:
        info = redis.Redis(host, port).cluster("info")
        return info["cluster_state"] == "ok"
    except Exception:
        return False
def check_redis_ssl_cluster_ready(host, port):
    """True once the TLS redis cluster reports state "ok".

    host/port are supplied by the poller but the check connects to the
    fixed published TLS port directly.
    """
    storage_url = (
        "rediss://localhost:8301/?ssl_cert_reqs=required"
        "&ssl_keyfile=./tests/tls/client.key"
        "&ssl_certfile=./tests/tls/client.crt"
        "&ssl_ca_certs=./tests/tls/ca.crt"
    )
    try:
        info = redis.Redis.from_url(storage_url).cluster("info")
        return info["cluster_state"] == "ok"
    except Exception:
        return False
def check_sentinel_ready(host, port):
    """True once the sentinel at host:port can reach its master."""
    try:
        return redis.sentinel.Sentinel([(host, port)]).master_for("mymaster").ping()
    # A bare ``except:`` also trapped KeyboardInterrupt/SystemExit during
    # readiness polling; only swallow ordinary errors.
    except Exception:
        return False
def check_sentinel_auth_ready(host, port):
    """True once the password-protected sentinel can reach its master."""
    try:
        return (
            redis.sentinel.Sentinel(
                [(host, port)],
                sentinel_kwargs={"password": "sekret"},
                password="sekret",
            )
            .master_for("mymaster")
            .ping()
        )
    # A bare ``except:`` also trapped KeyboardInterrupt/SystemExit during
    # readiness polling; only swallow ordinary errors.
    except Exception:
        return False
def check_mongo_ready(host, port):
    """True once mongodb accepts connections.

    host/port are supplied by the poller but the check connects to the
    published host port directly.
    """
    try:
        pymongo.MongoClient("mongodb://localhost:37017").server_info()
        return True
    # A bare ``except:`` also trapped KeyboardInterrupt/SystemExit during
    # readiness polling; only swallow ordinary errors.
    except Exception:
        return False
@pytest.fixture(scope="session")
def host_ip_env():
    """Export HOST_IP for docker-compose, derived from the default route."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        # No packets are sent; connect() just selects the outbound interface.
        probe.connect(("10.255.255.255", 1))
        ip = probe.getsockname()[0]
    except Exception:
        ip = "127.0.0.1"
    finally:
        probe.close()
    os.environ["HOST_IP"] = str(ip)
@pytest.fixture(scope="session")
def docker_services(host_ip_env, docker_services):
    # Shadows the plugin-provided fixture so HOST_IP is exported (via
    # host_ip_env) before any container is started.
    return docker_services


@pytest.fixture(scope="session")
def redis_basic_client(docker_services):
    # Session-wide client for the standalone redis container.
    docker_services.start("redis-basic")
    return redis.StrictRedis("localhost", 7379)


@pytest.fixture(scope="session")
def redis_uds_client(docker_services):
    # Unix-domain-socket redis; not supported on macOS docker.
    if platform.system().lower() == "darwin":
        pytest.skip("Fixture not supported on OSX")
    docker_services.start("redis-uds")
    return redis.from_url("unix:///tmp/limits.redis.sock")


@pytest.fixture(scope="session")
def redis_auth_client(docker_services):
    # Password-protected redis instance.
    docker_services.start("redis-auth")
    return redis.from_url("redis://:sekret@localhost:7389")


@pytest.fixture(scope="session")
def redis_ssl_client(docker_services):
    # TLS redis; client cert/key/CA live under tests/tls.
    docker_services.start("redis-ssl")
    storage_url = (
        "rediss://localhost:8379/0?ssl_cert_reqs=required"
        "&ssl_keyfile=./tests/tls/client.key"
        "&ssl_certfile=./tests/tls/client.crt"
        "&ssl_ca_certs=./tests/tls/ca.crt"
    )
    return redis.from_url(storage_url)
@pytest.fixture(scope="session")
def redis_cluster_client(docker_services):
    docker_services.start("redis-cluster-init")
    docker_services.wait_for_service("redis-cluster-6", 7006, check_redis_cluster_ready)
    # CI runners are slower to converge the cluster; allow extra time.
    if os.environ.get("CI") == "True":
        time.sleep(10)
    return redis.cluster.RedisCluster("localhost", 7001)


@pytest.fixture(scope="session")
def redis_ssl_cluster_client(docker_services):
    docker_services.start("redis-ssl-cluster-init")
    docker_services.wait_for_service(
        "redis-ssl-cluster-6", 8306, check_redis_ssl_cluster_ready
    )
    # CI runners are slower to converge the cluster; allow extra time.
    if os.environ.get("CI") == "True":
        time.sleep(10)
    storage_url = (
        "rediss://localhost:8301/?ssl_cert_reqs=required"
        "&ssl_keyfile=./tests/tls/client.key"
        "&ssl_certfile=./tests/tls/client.crt"
        "&ssl_ca_certs=./tests/tls/ca.crt"
    )
    return redis.cluster.RedisCluster.from_url(storage_url)
@pytest.fixture(scope="session")
def redis_sentinel_client(docker_services):
    docker_services.start("redis-sentinel")
    docker_services.wait_for_service("redis-sentinel", 26379, check_sentinel_ready)
    return redis.sentinel.Sentinel([("localhost", 26379)])


@pytest.fixture(scope="session")
def redis_sentinel_auth_client(docker_services):
    docker_services.start("redis-sentinel-auth")
    docker_services.wait_for_service(
        "redis-sentinel-auth", 26379, check_sentinel_auth_ready
    )
    # NOTE(review): the readiness probe uses port 26379 while the client
    # connects to 36379 -- presumably a host-port mapping; confirm against
    # the compose file.
    return redis.sentinel.Sentinel(
        [("localhost", 36379)],
        sentinel_kwargs={"password": "sekret"},
        password="sekret",
    )


@pytest.fixture(scope="session")
def memcached_client(docker_services):
    docker_services.start("memcached-1")
    return pymemcache.Client(("localhost", 22122))


@pytest.fixture(scope="session")
def memcached_cluster_client(docker_services):
    # Two-node hashed memcached cluster.
    docker_services.start("memcached-1")
    docker_services.start("memcached-2")
    return pymemcache.client.HashClient([("localhost", 22122), ("localhost", 22123)])


@pytest.fixture(scope="session")
def memcached_uds_client(docker_services):
    # Unix-domain-socket memcached; not supported on macOS docker.
    if platform.system().lower() == "darwin":
        pytest.skip("Fixture not supported on OSX")
    docker_services.start("memcached-uds")
    return pymemcache.Client("/tmp/limits.memcached.sock")


@pytest.fixture(scope="session")
def mongodb_client(docker_services):
    docker_services.start("mongodb")
    docker_services.wait_for_service("mongodb", 27017, check_mongo_ready)
    return pymongo.MongoClient("mongodb://localhost:37017")
@pytest.fixture
def memcached(memcached_client):
    # Fresh (flushed) memcached for each test.
    memcached_client.flush_all()
    return memcached_client


@pytest.fixture
def memcached_uds(memcached_uds_client):
    # Fresh (flushed) memcached-over-UDS for each test.
    memcached_uds_client.flush_all()
    return memcached_uds_client


@pytest.fixture
def memcached_cluster(memcached_cluster_client):
    # Fresh (flushed) memcached cluster for each test.
    memcached_cluster_client.flush_all()
    return memcached_cluster_client
@pytest.fixture
def redis_basic(redis_basic_client):
    """Function-scoped fixture: a flushed basic redis client.

    Previously returned ``redis_basic`` (the fixture function object
    itself) instead of the flushed client.
    """
    redis_basic_client.flushall()
    return redis_basic_client
@pytest.fixture
def redis_ssl(redis_ssl_client):
    # Fresh (flushed) TLS redis for each test.
    redis_ssl_client.flushall()
    return redis_ssl_client


@pytest.fixture
def redis_auth(redis_auth_client):
    # Fresh (flushed) password-protected redis for each test.
    redis_auth_client.flushall()
    return redis_auth_client


@pytest.fixture
def redis_uds(redis_uds_client):
    # Fresh (flushed) redis-over-UDS for each test.
    redis_uds_client.flushall()
    return redis_uds_client


@pytest.fixture
def redis_cluster(redis_cluster_client):
    # Fresh (flushed) redis cluster for each test.
    redis_cluster_client.flushall()
    return redis_cluster_client


@pytest.fixture
def redis_ssl_cluster(redis_ssl_cluster_client):
    # Fresh (flushed) TLS redis cluster for each test.
    redis_ssl_cluster_client.flushall()
    return redis_ssl_cluster_client
@pytest.fixture
def redis_sentinel(redis_sentinel_client):
    """Function-scoped fixture: sentinel client with a flushed master.

    Previously returned ``redis_sentinel`` (the fixture function object
    itself) instead of the client.
    """
    redis_sentinel_client.master_for("mymaster").flushall()
    return redis_sentinel_client
@pytest.fixture
def redis_sentinel_auth(redis_sentinel_auth_client):
    # Authenticated sentinel client with a flushed master.
    redis_sentinel_auth_client.master_for("mymaster").flushall()
    return redis_sentinel_auth_client


@pytest.fixture
def mongodb(mongodb_client):
    # Drop the limits collections so each test starts clean.
    mongodb_client.limits.windows.drop()
    mongodb_client.limits.counters.drop()
    return mongodb_client
@pytest.fixture(scope="session")
def docker_services_project_name():
    # Compose project name; keeps containers namespaced to this suite.
    return "limits"


@pytest.fixture(scope="session")
def docker_compose_files(pytestconfig):
    """Get the docker-compose.yml absolute path.

    Override this fixture in your tests if you need a custom location.
    """
    return ["docker-compose.yml"]
| mit | 7a1041dca290e05b2a9d71e7b390202b | 23.43 | 88 | 0.6769 | 3.510057 | false | true | false | false |
rst2pdf/rst2pdf | rst2pdf/tests/input/sphinx-issue529/conf.py | 1 | 1322 | # -*- coding: utf-8 -*-
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['rst2pdf.pdfbuilder']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Foobar'
copyright = u'2009, Jason S'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.1'
# The full version, including alpha/beta/rc tags.
release = '1.0.1'
# -- Options for sphinx.ext.todo extension -----------------------------------
todo_include_todos = True
# -- Options for PDF output --------------------------------------------------
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author).
pdf_documents = [
('index', u'index', u'index', u'lorenzo'),
]
# A comma-separated list of custom stylesheets. Example:
pdf_stylesheets = ['sphinx']
# If false, no index is generated.
pdf_use_index = False
# If false, no coverpage is generated.
pdf_use_coverpage = False
pdf_invariant = True
| mit | 94642590accf87dc403893fccc3b3e2f | 26.541667 | 80 | 0.638427 | 3.787966 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/python3/__init__.py | 1 | 16612 | import glob
import sh
import subprocess
from multiprocessing import cpu_count
from os import environ, utime
from os.path import dirname, exists, join
from pathlib import Path
import shutil
from pythonforandroid.logger import info, warning, shprint
from pythonforandroid.patching import version_starts_with
from pythonforandroid.recipe import Recipe, TargetPythonRecipe
from pythonforandroid.util import (
current_directory,
ensure_dir,
walk_valid_filens,
BuildInterruptingException,
)
NDK_API_LOWER_THAN_SUPPORTED_MESSAGE = (
'Target ndk-api is {ndk_api}, '
'but the python3 recipe supports only {min_ndk_api}+'
)
class Python3Recipe(TargetPythonRecipe):
'''
The python3's recipe
^^^^^^^^^^^^^^^^^^^^
The python 3 recipe can be built with some extra python modules, but to do
so, we need some libraries. By default, we ship the python3 recipe with
some common libraries, defined in ``depends``. We also support some optional
libraries, which are less common that the ones defined in ``depends``, so
we added them as optional dependencies (``opt_depends``).
Below you have a relationship between the python modules and the recipe
libraries::
- _ctypes: you must add the recipe for ``libffi``.
- _sqlite3: you must add the recipe for ``sqlite3``.
- _ssl: you must add the recipe for ``openssl``.
- _bz2: you must add the recipe for ``libbz2`` (optional).
- _lzma: you must add the recipe for ``liblzma`` (optional).
.. note:: This recipe can be built only against API 21+.
.. versionchanged:: 2019.10.06.post0
- Refactored from deleted class ``python.GuestPythonRecipe`` into here
- Added optional dependencies: :mod:`~pythonforandroid.recipes.libbz2`
and :mod:`~pythonforandroid.recipes.liblzma`
.. versionchanged:: 0.6.0
Refactored into class
:class:`~pythonforandroid.python.GuestPythonRecipe`
'''
version = '3.9.9'
url = 'https://www.python.org/ftp/python/{version}/Python-{version}.tgz'
name = 'python3'
patches = [
'patches/pyconfig_detection.patch',
'patches/reproducible-buildinfo.diff',
# Python 3.7.1
('patches/py3.7.1_fix-ctypes-util-find-library.patch', version_starts_with("3.7")),
('patches/py3.7.1_fix-zlib-version.patch', version_starts_with("3.7")),
# Python 3.8.1 & 3.9.X
('patches/py3.8.1.patch', version_starts_with("3.8")),
('patches/py3.8.1.patch', version_starts_with("3.9"))
]
if shutil.which('lld') is not None:
patches = patches + [
("patches/py3.7.1_fix_cortex_a8.patch", version_starts_with("3.7")),
("patches/py3.8.1_fix_cortex_a8.patch", version_starts_with("3.8")),
("patches/py3.8.1_fix_cortex_a8.patch", version_starts_with("3.9"))
]
depends = ['hostpython3', 'sqlite3', 'openssl', 'libffi']
# those optional depends allow us to build python compression modules:
# - _bz2.so
# - _lzma.so
opt_depends = ['libbz2', 'liblzma']
'''The optional libraries which we would like to get our python linked'''
configure_args = (
'--host={android_host}',
'--build={android_build}',
'--enable-shared',
'--enable-ipv6',
'ac_cv_file__dev_ptmx=yes',
'ac_cv_file__dev_ptc=no',
'--without-ensurepip',
'ac_cv_little_endian_double=yes',
'--prefix={prefix}',
'--exec-prefix={exec_prefix}',
'--enable-loadable-sqlite-extensions')
'''The configure arguments needed to build the python recipe. Those are
used in method :meth:`build_arch` (if not overwritten like python3's
recipe does).
'''
MIN_NDK_API = 21
'''Sets the minimal ndk api number needed to use the recipe.
.. warning:: This recipe can be built only against API 21+, so it means
that any class which inherits from class:`GuestPythonRecipe` will have
this limitation.
'''
stdlib_dir_blacklist = {
'__pycache__',
'test',
'tests',
'lib2to3',
'ensurepip',
'idlelib',
'tkinter',
}
'''The directories that we want to omit for our python bundle'''
stdlib_filen_blacklist = [
'*.py',
'*.exe',
'*.whl',
]
'''The file extensions that we want to blacklist for our python bundle'''
site_packages_dir_blacklist = {
'__pycache__',
'tests'
}
'''The directories from site packages dir that we don't want to be included
in our python bundle.'''
site_packages_filen_blacklist = [
'*.py'
]
'''The file extensions from site packages dir that we don't want to be
included in our python bundle.'''
compiled_extension = '.pyc'
'''the default extension for compiled python files.
.. note:: the default extension for compiled python files has been .pyo for
python 2.x-3.4 but as of Python 3.5, the .pyo filename extension is no
longer used and has been removed in favour of extension .pyc
'''
def __init__(self, *args, **kwargs):
self._ctx = None
super().__init__(*args, **kwargs)
@property
def _libpython(self):
'''return the python's library name (with extension)'''
return 'libpython{link_version}.so'.format(
link_version=self.link_version
)
@property
def link_version(self):
'''return the python's library link version e.g. 3.7m, 3.8'''
major, minor = self.major_minor_version_string.split('.')
flags = ''
if major == '3' and int(minor) < 8:
flags += 'm'
return '{major}.{minor}{flags}'.format(
major=major,
minor=minor,
flags=flags
)
def include_root(self, arch_name):
return join(self.get_build_dir(arch_name), 'Include')
def link_root(self, arch_name):
return join(self.get_build_dir(arch_name), 'android-build')
def should_build(self, arch):
return not Path(self.link_root(arch.arch), self._libpython).is_file()
def prebuild_arch(self, arch):
super().prebuild_arch(arch)
self.ctx.python_recipe = self
def get_recipe_env(self, arch=None, with_flags_in_cc=True):
env = super().get_recipe_env(arch)
env['HOSTARCH'] = arch.command_prefix
env['CC'] = arch.get_clang_exe(with_target=True)
env['PATH'] = (
'{hostpython_dir}:{old_path}').format(
hostpython_dir=self.get_recipe(
'host' + self.name, self.ctx).get_path_to_python(),
old_path=env['PATH'])
env['CFLAGS'] = ' '.join(
[
'-fPIC',
'-DANDROID'
]
)
env['LDFLAGS'] = env.get('LDFLAGS', '')
if shutil.which('lld') is not None:
# Note: The -L. is to fix a bug in python 3.7.
# https://bugs.freebsd.org/bugzilla/show_bug.cgi?id=234409
env['LDFLAGS'] += ' -L. -fuse-ld=lld'
else:
warning('lld not found, linking without it. '
'Consider installing lld if linker errors occur.')
return env
def set_libs_flags(self, env, arch):
'''Takes care to properly link libraries with python depending on our
requirements and the attribute :attr:`opt_depends`.
'''
def add_flags(include_flags, link_dirs, link_libs):
env['CPPFLAGS'] = env.get('CPPFLAGS', '') + include_flags
env['LDFLAGS'] = env.get('LDFLAGS', '') + link_dirs
env['LIBS'] = env.get('LIBS', '') + link_libs
if 'sqlite3' in self.ctx.recipe_build_order:
info('Activating flags for sqlite3')
recipe = Recipe.get_recipe('sqlite3', self.ctx)
add_flags(' -I' + recipe.get_build_dir(arch.arch),
' -L' + recipe.get_lib_dir(arch), ' -lsqlite3')
if 'libffi' in self.ctx.recipe_build_order:
info('Activating flags for libffi')
recipe = Recipe.get_recipe('libffi', self.ctx)
# In order to force the correct linkage for our libffi library, we
# set the following variable to point where is our libffi.pc file,
# because the python build system uses pkg-config to configure it.
env['PKG_CONFIG_PATH'] = recipe.get_build_dir(arch.arch)
add_flags(' -I' + ' -I'.join(recipe.get_include_dirs(arch)),
' -L' + join(recipe.get_build_dir(arch.arch), '.libs'),
' -lffi')
if 'openssl' in self.ctx.recipe_build_order:
info('Activating flags for openssl')
recipe = Recipe.get_recipe('openssl', self.ctx)
self.configure_args += \
('--with-openssl=' + recipe.get_build_dir(arch.arch),)
add_flags(recipe.include_flags(arch),
recipe.link_dirs_flags(arch), recipe.link_libs_flags())
for library_name in {'libbz2', 'liblzma'}:
if library_name in self.ctx.recipe_build_order:
info(f'Activating flags for {library_name}')
recipe = Recipe.get_recipe(library_name, self.ctx)
add_flags(recipe.get_library_includes(arch),
recipe.get_library_ldflags(arch),
recipe.get_library_libs_flag())
# python build system contains hardcoded zlib version which prevents
# the build of zlib module, here we search for android's zlib version
# and sets the right flags, so python can be build with android's zlib
info("Activating flags for android's zlib")
zlib_lib_path = arch.ndk_lib_dir_versioned
zlib_includes = self.ctx.ndk.sysroot_include_dir
zlib_h = join(zlib_includes, 'zlib.h')
try:
with open(zlib_h) as fileh:
zlib_data = fileh.read()
except IOError:
raise BuildInterruptingException(
"Could not determine android's zlib version, no zlib.h ({}) in"
" the NDK dir includes".format(zlib_h)
)
for line in zlib_data.split('\n'):
if line.startswith('#define ZLIB_VERSION '):
break
else:
raise BuildInterruptingException(
'Could not parse zlib.h...so we cannot find zlib version,'
'required by python build,'
)
env['ZLIB_VERSION'] = line.replace('#define ZLIB_VERSION ', '')
add_flags(' -I' + zlib_includes, ' -L' + zlib_lib_path, ' -lz')
return env
def build_arch(self, arch):
if self.ctx.ndk_api < self.MIN_NDK_API:
raise BuildInterruptingException(
NDK_API_LOWER_THAN_SUPPORTED_MESSAGE.format(
ndk_api=self.ctx.ndk_api, min_ndk_api=self.MIN_NDK_API
),
)
recipe_build_dir = self.get_build_dir(arch.arch)
# Create a subdirectory to actually perform the build
build_dir = join(recipe_build_dir, 'android-build')
ensure_dir(build_dir)
# TODO: Get these dynamically, like bpo-30386 does
sys_prefix = '/usr/local'
sys_exec_prefix = '/usr/local'
env = self.get_recipe_env(arch)
env = self.set_libs_flags(env, arch)
android_build = sh.Command(
join(recipe_build_dir,
'config.guess'))().stdout.strip().decode('utf-8')
with current_directory(build_dir):
if not exists('config.status'):
shprint(
sh.Command(join(recipe_build_dir, 'configure')),
*(' '.join(self.configure_args).format(
android_host=env['HOSTARCH'],
android_build=android_build,
prefix=sys_prefix,
exec_prefix=sys_exec_prefix)).split(' '),
_env=env)
shprint(
sh.make, 'all', '-j', str(cpu_count()),
'INSTSONAME={lib_name}'.format(lib_name=self._libpython),
_env=env
)
# TODO: Look into passing the path to pyconfig.h in a
# better way, although this is probably acceptable
sh.cp('pyconfig.h', join(recipe_build_dir, 'Include'))
def compile_python_files(self, dir):
'''
Compile the python files (recursively) for the python files inside
a given folder.
.. note:: python2 compiles the files into extension .pyo, but in
python3, and as of Python 3.5, the .pyo filename extension is no
longer used...uses .pyc (https://www.python.org/dev/peps/pep-0488)
'''
args = [self.ctx.hostpython]
args += ['-OO', '-m', 'compileall', '-b', '-f', dir]
subprocess.call(args)
def create_python_bundle(self, dirn, arch):
"""
Create a packaged python bundle in the target directory, by
copying all the modules and standard library to the right
place.
"""
# Todo: find a better way to find the build libs folder
modules_build_dir = join(
self.get_build_dir(arch.arch),
'android-build',
'build',
'lib.linux{}-{}-{}'.format(
'2' if self.version[0] == '2' else '',
arch.command_prefix.split('-')[0],
self.major_minor_version_string
))
# Compile to *.pyc the python modules
self.compile_python_files(modules_build_dir)
# Compile to *.pyc the standard python library
self.compile_python_files(join(self.get_build_dir(arch.arch), 'Lib'))
# Compile to *.pyc the other python packages (site-packages)
self.compile_python_files(self.ctx.get_python_install_dir(arch.arch))
# Bundle compiled python modules to a folder
modules_dir = join(dirn, 'modules')
c_ext = self.compiled_extension
ensure_dir(modules_dir)
module_filens = (glob.glob(join(modules_build_dir, '*.so')) +
glob.glob(join(modules_build_dir, '*' + c_ext)))
info("Copy {} files into the bundle".format(len(module_filens)))
for filen in module_filens:
info(" - copy {}".format(filen))
shutil.copy2(filen, modules_dir)
# zip up the standard library
stdlib_zip = join(dirn, 'stdlib.zip')
with current_directory(join(self.get_build_dir(arch.arch), 'Lib')):
stdlib_filens = list(walk_valid_filens(
'.', self.stdlib_dir_blacklist, self.stdlib_filen_blacklist))
if 'SOURCE_DATE_EPOCH' in environ:
# for reproducible builds
stdlib_filens.sort()
timestamp = int(environ['SOURCE_DATE_EPOCH'])
for filen in stdlib_filens:
utime(filen, (timestamp, timestamp))
info("Zip {} files into the bundle".format(len(stdlib_filens)))
shprint(sh.zip, '-X', stdlib_zip, *stdlib_filens)
# copy the site-packages into place
ensure_dir(join(dirn, 'site-packages'))
ensure_dir(self.ctx.get_python_install_dir(arch.arch))
# TODO: Improve the API around walking and copying the files
with current_directory(self.ctx.get_python_install_dir(arch.arch)):
filens = list(walk_valid_filens(
'.', self.site_packages_dir_blacklist,
self.site_packages_filen_blacklist))
info("Copy {} files into the site-packages".format(len(filens)))
for filen in filens:
info(" - copy {}".format(filen))
ensure_dir(join(dirn, 'site-packages', dirname(filen)))
shutil.copy2(filen, join(dirn, 'site-packages', filen))
# copy the python .so files into place
python_build_dir = join(self.get_build_dir(arch.arch),
'android-build')
python_lib_name = 'libpython' + self.link_version
shprint(
sh.cp,
join(python_build_dir, python_lib_name + '.so'),
join(self.ctx.bootstrap.dist_dir, 'libs', arch.arch)
)
info('Renaming .so files to reflect cross-compile')
self.reduce_object_file_names(join(dirn, 'site-packages'))
return join(dirn, 'site-packages')
recipe = Python3Recipe()
| mit | 7a29299d2bc8facaae62b435fa764231 | 37.722611 | 91 | 0.575066 | 3.855187 | false | false | false | false |
kivy/python-for-android | tests/test_pythonpackage.py | 3 | 3645 | """
THESE TESTS DON'T RUN IN GITHUB-ACTIONS (takes too long!!)
ONLY THE BASIC ONES IN test_pythonpackage_basic.py DO.
(This file basically covers all tests for any of the
functions that aren't already part of the basic
test set)
"""
import os
import shutil
import tempfile
from pythonforandroid.pythonpackage import (
_extract_info_from_package,
extract_metainfo_files_from_package,
get_package_as_folder,
get_package_dependencies,
)
def local_repo_folder():
return os.path.abspath(os.path.join(
os.path.dirname(__file__), ".."
))
def test_get_package_dependencies():
# TEST 1 from source code folder:
deps_nonrecursive = get_package_dependencies(
local_repo_folder(), recursive=False
)
deps_recursive = get_package_dependencies(
local_repo_folder(), recursive=True
)
# Check that jinja2 is returned as direct dep:
assert len([dep for dep in deps_nonrecursive
if "jinja2" in dep]) > 0
# Check that MarkupSafe is returned as indirect dep of jinja2:
assert [
dep for dep in deps_recursive
if "MarkupSafe" in dep
]
# Check setuptools not being in non-recursive deps:
# (It will be in recursive ones due to p4a's pep517 dependency)
assert "setuptools" not in deps_nonrecursive
# Check setuptools is present in non-recursive deps,
# if we also add build requirements:
assert "setuptools" in get_package_dependencies(
local_repo_folder(), recursive=False,
include_build_requirements=True,
)
# TEST 2 from external ref:
# Check that jinja2 is returned as direct dep:
assert len([dep for dep in get_package_dependencies("python-for-android")
if "jinja2" in dep]) > 0
# Check that MarkupSafe is returned as indirect dep of jinja2:
assert [
dep for dep in get_package_dependencies(
"python-for-android", recursive=True
)
if "MarkupSafe" in dep
]
def test_extract_metainfo_files_from_package():
# TEST 1 from external ref:
files_dir = tempfile.mkdtemp()
try:
extract_metainfo_files_from_package("python-for-android",
files_dir, debug=True)
assert os.path.exists(os.path.join(files_dir, "METADATA"))
finally:
shutil.rmtree(files_dir)
# TEST 2 from local folder:
files_dir = tempfile.mkdtemp()
try:
extract_metainfo_files_from_package(local_repo_folder(),
files_dir, debug=True)
assert os.path.exists(os.path.join(files_dir, "METADATA"))
finally:
shutil.rmtree(files_dir)
def test_get_package_as_folder():
# WARNING !!! This function behaves DIFFERENTLY if the requested package
# has a wheel available vs a source package. What we're getting is
# essentially what pip also would fetch, but this can obviously CHANGE
# depending on what is happening/available on PyPI.
#
# Therefore, this test doesn't really go in-depth.
(obtained_type, obtained_path) = \
get_package_as_folder("python-for-android")
try:
assert obtained_type in {"source", "wheel"}
assert os.path.isdir(obtained_path)
finally:
# Try to ensure cleanup:
shutil.rmtree(obtained_path)
def test__extract_info_from_package():
# This is indirectly already tested a lot through get_package_name()
# and get_package_dependencies(), so we'll just do one basic test:
assert _extract_info_from_package(
local_repo_folder(),
extract_type="name"
) == "python-for-android"
| mit | 219a2d0ae791f0914c0708a4466dd11c | 31.837838 | 77 | 0.651303 | 3.898396 | false | true | false | false |
rst2pdf/rst2pdf | rst2pdf/pygments2style.py | 1 | 2603 | # -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
'''
Creates a rst2pdf stylesheet for each pygments style.
'''
import os
from dumpstyle import dumps
from pygments.token import STANDARD_TYPES
from pygments import styles as pstyles
# First get a list of all possible classes
classnames = set()
for name in list(pstyles.get_all_styles()):
css = os.popen('pygmentize -S %s -f html' % name, 'r').read()
for line in css.splitlines():
line = line.strip()
sname = "pygments-" + line.split(' ')[0][1:]
classnames.add(sname)
def css2rl(css):
dstyles = {}
# First create a dumb stylesheet
for key in STANDARD_TYPES:
dstyles["pygments-" + STANDARD_TYPES[key]] = {'parent': 'code'}
seenclassnames = set()
styles = []
for line in css.splitlines():
line = line.strip()
sname = "pygments-" + line.split(' ')[0][1:]
seenclassnames.add(sname)
style = dstyles.get(sname, {'parent': 'code'})
options = line.split('{')[1].split('}')[0].split(';')
for option in options:
option = option.strip()
option, argument = option.split(':')
option = option.strip()
argument = argument.strip()
if option == 'color':
style['textColor'] = argument.strip()
if option == 'background-color':
style['backColor'] = argument.strip()
# These two can come in any order
if option == 'font-weight' and argument == 'bold':
if 'fontName' in style and style['fontName'] == 'fontMonoItalic':
style['fontName'] = 'fontMonoBoldItalic'
else:
style['fontName'] = 'fontMonoBold'
if option == 'font-style' and argument == 'italic':
if 'fontName' in style and style['fontName'] == 'fontSansBold':
style['fontName'] = 'fontMonoBoldItalic'
else:
style['fontName'] = 'fontMonoItalic'
if style.get('textColor', None) is None:
style['textColor'] = 'black'
styles.append([sname, style])
# Now add default styles for all unseen class names
for sname in classnames - seenclassnames:
style = dstyles.get(sname, {'parent': 'code'})
style['textColor'] = 'black'
styles.append([sname, style])
return dumps({'styles': styles})
for name in list(pstyles.get_all_styles()):
css = os.popen('pygmentize -S %s -f html' % name, 'r').read()
open(name + '.style', 'w').write(css2rl(css))
| mit | 27ee036a9e0ac4b4ef19feac2803f39d | 34.657534 | 81 | 0.566654 | 3.816716 | false | false | false | false |
kivy/python-for-android | setup.py | 1 | 4752 |
import glob
from io import open # for open(..,encoding=...) parameter in python 2
from os import walk
from os.path import join, dirname, sep
import re
from setuptools import setup, find_packages
# NOTE: All package data should also be set in MANIFEST.in
packages = find_packages()
package_data = {'': ['*.tmpl',
'*.patch',
'*.diff', ], }
data_files = []
# must be a single statement since buildozer is currently parsing it, refs:
# https://github.com/kivy/buildozer/issues/722
install_reqs = [
'appdirs', 'colorama>=0.3.3', 'jinja2',
'sh>=1.10; sys_platform!="nt"',
'pep517', 'toml', 'packaging',
]
# (pep517 and toml are used by pythonpackage.py)
# By specifying every file manually, package_data will be able to
# include them in binary distributions. Note that we have to add
# everything as a 'pythonforandroid' rule, using '' apparently doesn't
# work.
def recursively_include(results, directory, patterns):
for root, subfolders, files in walk(directory):
for fn in files:
if not any(glob.fnmatch.fnmatch(fn, pattern) for pattern in patterns):
continue
filename = join(root, fn)
directory = 'pythonforandroid'
if directory not in results:
results[directory] = []
results[directory].append(join(*filename.split(sep)[1:]))
recursively_include(package_data, 'pythonforandroid/recipes',
['*.patch', 'Setup*', '*.pyx', '*.py', '*.c', '*.h',
'*.mk', '*.jam', '*.diff', ])
recursively_include(package_data, 'pythonforandroid/bootstraps',
['*.properties', '*.xml', '*.java', '*.tmpl', '*.txt', '*.png',
'*.mk', '*.c', '*.h', '*.py', '*.sh', '*.jpg', '*.aidl',
'*.gradle', '.gitkeep', 'gradlew*', '*.jar', "*.patch", ])
recursively_include(package_data, 'pythonforandroid/bootstraps',
['sdl-config', ])
recursively_include(package_data, 'pythonforandroid/bootstraps/webview',
['*.html', ])
recursively_include(package_data, 'pythonforandroid',
['liblink', 'biglink', 'liblink.sh'])
with open(join(dirname(__file__), 'README.md'),
encoding="utf-8",
errors="replace",
) as fileh:
long_description = fileh.read()
init_filen = join(dirname(__file__), 'pythonforandroid', '__init__.py')
version = None
try:
with open(init_filen,
encoding="utf-8",
errors="replace"
) as fileh:
lines = fileh.readlines()
except IOError:
pass
else:
for line in lines:
line = line.strip()
if line.startswith('__version__ = '):
matches = re.findall(r'["\'].+["\']', line)
if matches:
version = matches[0].strip("'").strip('"')
break
if version is None:
raise Exception('Error: version could not be loaded from {}'.format(init_filen))
setup(name='python-for-android',
version=version,
description='Android APK packager for Python scripts and apps',
long_description=long_description,
long_description_content_type='text/markdown',
python_requires=">=3.7.0",
author='The Kivy team',
author_email='kivy-dev@googlegroups.com',
url='https://github.com/kivy/python-for-android',
license='MIT',
install_requires=install_reqs,
entry_points={
'console_scripts': [
'python-for-android = pythonforandroid.entrypoints:main',
'p4a = pythonforandroid.entrypoints:main',
],
'distutils.commands': [
'apk = pythonforandroid.bdistapk:BdistAPK',
'aar = pythonforandroid.bdistapk:BdistAAR',
'aab = pythonforandroid.bdistapk:BdistAAB',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Operating System :: OS Independent',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Android',
'Programming Language :: C',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: 3.10',
'Topic :: Software Development',
'Topic :: Utilities',
],
packages=packages,
package_data=package_data,
)
| mit | 7711fa1ed9bfcb0124c0ee20736381f4 | 35.837209 | 84 | 0.569865 | 4.044255 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/android/src/android/_ctypes_library_finder.py | 3 | 2516 |
import sys
import os
def get_activity_lib_dir(activity_name):
from jnius import autoclass
# Get the actual activity instance:
activity_class = autoclass(activity_name)
if activity_class is None:
return None
activity = None
if hasattr(activity_class, "mActivity") and \
activity_class.mActivity is not None:
activity = activity_class.mActivity
elif hasattr(activity_class, "mService") and \
activity_class.mService is not None:
activity = activity_class.mService
if activity is None:
return None
# Extract the native lib dir from the activity instance:
package_name = activity.getApplicationContext().getPackageName()
manager = activity.getApplicationContext().getPackageManager()
manager_class = autoclass("android.content.pm.PackageManager")
native_lib_dir = manager.getApplicationInfo(
package_name, manager_class.GET_SHARED_LIBRARY_FILES
).nativeLibraryDir
return native_lib_dir
def does_libname_match_filename(search_name, file_path):
# Filter file names so given search_name="mymodule" we match one of:
# mymodule.so (direct name + .so)
# libmymodule.so (added lib prefix)
# mymodule.arm64.so (added dot-separated middle parts)
# mymodule.so.1.3.4 (added dot-separated version tail)
# and all above (all possible combinations)
import re
file_name = os.path.basename(file_path)
return (re.match(r"^(lib)?" + re.escape(search_name) +
r"\.(.*\.)?so(\.[0-9]+)*$", file_name) is not None)
def find_library(name):
# Obtain all places for native libraries:
if sys.maxsize > 2**32: # 64bit-build
lib_search_dirs = ["/system/lib64", "/system/lib"]
else:
lib_search_dirs = ["/system/lib"]
lib_dir_1 = get_activity_lib_dir("org.kivy.android.PythonActivity")
if lib_dir_1 is not None:
lib_search_dirs.insert(0, lib_dir_1)
lib_dir_2 = get_activity_lib_dir("org.kivy.android.PythonService")
if lib_dir_2 is not None and lib_dir_2 not in lib_search_dirs:
lib_search_dirs.insert(0, lib_dir_2)
# Now scan the lib dirs:
for lib_dir in [ldir for ldir in lib_search_dirs if os.path.exists(ldir)]:
filelist = [
f for f in os.listdir(lib_dir)
if does_libname_match_filename(name, f)
]
if len(filelist) > 0:
return os.path.join(lib_dir, filelist[0])
return None
| mit | 2afd20e264beebeacde46553537128fa | 36.552239 | 78 | 0.641494 | 3.543662 | false | false | false | false |
rst2pdf/rst2pdf | rst2pdf/styles.py | 1 | 35530 | # -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
from copy import copy
import os
import os.path
import sys
import re
import docutils.nodes
import reportlab
import reportlab.lib.colors as colors
from reportlab.lib.fonts import addMapping
import reportlab.lib.pagesizes as pagesizes
from reportlab.lib.styles import StyleSheet1, ParagraphStyle
import reportlab.lib.units as units
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfbase import pdfmetrics
import reportlab.rl_config
import yaml
from . import findfonts
from .log import log
from .rson import loads as rson_loads
# Regex capturing an optional signed decimal number ("-?[0-9.]*") at the
# start of a dimension string, so the numeric part can be separated from
# its unit suffix (e.g. "2cm" -> "2" + "cm").
# NOTE(review): the call site is outside this chunk -- presumably used
# with re.split()/match() when parsing dimension values; confirm.
unit_separator = re.compile('(-?[0-9.]*)')
class StyleSheet(object):
"""Class to handle a collection of stylesheets"""
@staticmethod
def stylepairs(data):
"""Allows pairs of style information to be expressed
in canonical reportlab list of two-item list/tuple,
or in a more human-readable dictionary.
"""
styles = data.get('styles', {})
try:
stylenames = list(styles.keys())
except AttributeError:
for style in styles:
yield style
return
# Traditional reportlab styles are in ordered (key, value)
# tuples. We also support dictionary lookup. This is not
# necessarily ordered.
# The only problem with dictionary lookup is that
# we need to insure that parents are processed before
# their children. This loop is a little ugly, but
# gets the job done.
while stylenames:
name = stylenames.pop()
parent = styles[name].get('parent')
if parent not in stylenames:
yield name, styles[name]
continue
names = [name]
while parent in stylenames:
stylenames.remove(parent)
names.append(parent)
parent = styles[names[-1]].get('parent')
while names:
name = names.pop()
yield name, styles[name]
def __init__(self, flist, font_path=None, style_path=None, def_dpi=300):
log.info('Using stylesheets: %s' % ','.join(flist))
self.suppress_undefined_style_warning = False
# find base path
if hasattr(sys, 'frozen'):
self.PATH = os.path.abspath(os.path.dirname(sys.executable))
else:
self.PATH = os.path.abspath(os.path.dirname(__file__))
# flist is a list of stylesheet filenames.
# They will be loaded and merged in order.
# but the two default stylesheets will always
# be loaded first
flist = [
os.path.join(self.PATH, 'styles', 'styles.yaml'),
os.path.join(self.PATH, 'styles', 'default.yaml'),
] + flist
self.def_dpi = def_dpi
if font_path is None:
font_path = []
font_path += ['.', os.path.join(self.PATH, 'fonts')]
self.FontSearchPath = list(map(os.path.expanduser, font_path))
if style_path is None:
style_path = []
style_path += [
'.',
os.path.join(self.PATH, 'styles'),
'~/.rst2pdf/styles',
]
self.StyleSearchPath = list(map(os.path.expanduser, style_path))
# Remove duplicates but preserve order. Not very efficient, but these are short lists
self.FontSearchPath = [
x
for (i, x) in enumerate(self.FontSearchPath)
if self.FontSearchPath.index(x) == i
]
self.StyleSearchPath = [
x
for (i, x) in enumerate(self.StyleSearchPath)
if self.StyleSearchPath.index(x) == i
]
log.info('FontPath:%s' % self.FontSearchPath)
log.info('StylePath:%s' % self.StyleSearchPath)
findfonts.flist = self.FontSearchPath
# Page width, height
self.pw = 0
self.ph = 0
# Page size [w,h]
self.ps = None
# Margins (top,bottom,left,right,gutter)
self.tm = 0
self.bm = 0
self.lm = 0
self.rm = 0
self.gm = 0
# text width
self.tw = 0
# Default emsize, later it will be the fontSize of the base style
self.emsize = 10
self.languages = []
ssdata = self.readSheets(flist)
# Get pageSetup data from all stylessheets in order:
self.ps = pagesizes.A4
self.page = {}
for data, ssname in ssdata:
page = data.get('pageSetup', {})
if page:
self.page.update(page)
pgs = page.get('size', None)
if pgs: # A standard size
pgs = pgs.upper()
if pgs in pagesizes.__dict__:
self.ps = list(pagesizes.__dict__[pgs])
self.psname = pgs
if 'width' in self.page:
del self.page['width']
if 'height' in self.page:
del self.page['height']
elif pgs.endswith('-LANDSCAPE'):
self.psname = pgs.split('-')[0]
self.ps = list(
pagesizes.landscape(pagesizes.__dict__[self.psname])
)
if 'width' in self.page:
del self.page['width']
if 'height' in self.page:
del self.page['height']
else:
log.critical(
'Unknown page size %s in stylesheet %s'
% (page['size'], ssname)
)
continue
else: # A custom size
if 'size' in self.page:
del self.page['size']
# The sizes are expressed in some unit.
# For example, 2cm is 2 centimeters, and we need
# to do 2*cm (cm comes from reportlab.lib.units)
if 'width' in page:
self.ps[0] = self.adjustUnits(page['width'])
if 'height' in page:
self.ps[1] = self.adjustUnits(page['height'])
self.pw, self.ph = self.ps
if 'margin-left' in page:
self.lm = self.adjustUnits(page['margin-left'])
if 'margin-right' in page:
self.rm = self.adjustUnits(page['margin-right'])
if 'margin-top' in page:
self.tm = self.adjustUnits(page['margin-top'])
if 'margin-bottom' in page:
self.bm = self.adjustUnits(page['margin-bottom'])
if 'margin-gutter' in page:
self.gm = self.adjustUnits(page['margin-gutter'])
if 'spacing-header' in page:
self.ts = self.adjustUnits(page['spacing-header'])
if 'spacing-footer' in page:
self.bs = self.adjustUnits(page['spacing-footer'])
if 'firstTemplate' in page:
self.firstTemplate = page['firstTemplate']
# tw is the text width.
# We need it to calculate header-footer height
# and compress literal blocks.
self.tw = self.pw - self.lm - self.rm - self.gm
# Get page templates from all stylesheets
self.pageTemplates = {}
for data, ssname in ssdata:
templates = data.get('pageTemplates', {})
# templates is a dictionary of pageTemplates
for key in templates:
template = templates[key]
# template is a dict.
# template[´frames'] is a list of frames
if key in self.pageTemplates:
self.pageTemplates[key].update(template)
else:
self.pageTemplates[key] = template
# Get font aliases from all stylesheets in order
self.fontsAlias = {}
for data, ssname in ssdata:
self.fontsAlias.update(data.get('fontsAlias', {}))
embedded_fontnames = []
self.embedded = []
# Embed all fonts indicated in all stylesheets
for data, ssname in ssdata:
embedded = data.get('embeddedFonts', [])
for font in embedded:
try:
# (removed the feature that supported a string here, this isn't
# documented or supported)
# Each "font" is a list of four files, which will be
# used for regular / bold / italic / bold+italic
# versions of the font.
# If your font doesn't have one of them, just repeat
# the regular font.
# Example, using the Tuffy font from
# http://tulrich.com/fonts/
# "embeddedFonts" : [
# ["Tuffy.ttf",
# "Tuffy_Bold.ttf",
# "Tuffy_Italic.ttf",
# "Tuffy_Bold_Italic.ttf"]
# ],
# The fonts will be registered with the file name,
# minus the extension.
if font[0].lower().endswith('.ttf'): # A True Type font
for variant in font:
location = self.findFont(variant)
# strip extension and leading path to get the name
# to register the font under
filename = os.path.basename(variant)
fontname = str(filename.split('.')[0])
pdfmetrics.registerFont(TTFont(fontname, location))
log.info(
'Registering font: %s from %s' % (fontname, location)
)
self.embedded.append(fontname)
# And map them all together
regular, bold, italic, bolditalic = [
variant.split('.')[0] for variant in font
]
addMapping(regular, 0, 0, regular)
addMapping(regular, 0, 1, italic)
addMapping(regular, 1, 0, bold)
addMapping(regular, 1, 1, bolditalic)
else: # A Type 1 font
# For type 1 fonts we require
# [FontName,regular,italic,bold,bolditalic]
# where each variant is a (pfbfile,afmfile) pair.
# For example, for the URW palladio from TeX:
# ["Palatino",("uplr8a.pfb","uplr8a.afm"),
# ("uplri8a.pfb","uplri8a.afm"),
# ("uplb8a.pfb","uplb8a.afm"),
# ("uplbi8a.pfb","uplbi8a.afm")]
regular = pdfmetrics.EmbeddedType1Face(*font[1])
italic = pdfmetrics.EmbeddedType1Face(*font[2])
bold = pdfmetrics.EmbeddedType1Face(*font[3])
bolditalic = pdfmetrics.EmbeddedType1Face(*font[4])
except Exception as e:
try:
if isinstance(font, list):
fname = font[0]
else:
fname = font
log.critical(
"Error processing font %s: %s",
os.path.splitext(fname)[0],
str(e),
)
sys.exit(1)
except Exception as e:
log.critical("Error processing font %s: %s", fname, str(e))
sys.exit(1)
# Go though all styles in all stylesheets and find all fontNames.
# Then decide what to do with them
for data, ssname in ssdata:
for [skey, style] in self.stylepairs(data):
for key in style:
if key == 'fontName' or key.endswith('FontName'):
# It's an alias, replace it
if style[key] in self.fontsAlias:
style[key] = self.fontsAlias[style[key]]
# Embedded already, nothing to do
if style[key] in self.embedded:
continue
# Standard font, nothing to do
if style[key] in (
"Courier",
"Courier-Bold",
"Courier-BoldOblique",
"Courier-Oblique",
"Helvetica",
"Helvetica-Bold",
"Helvetica-BoldOblique",
"Helvetica-Oblique",
"Symbol",
"Times-Bold",
"Times-BoldItalic",
"Times-Italic",
"Times-Roman",
"ZapfDingbats",
):
continue
# Now we need to do something
# See if we can find the font
fname, pos = findfonts.guessFont(style[key])
fontList = findfonts.autoEmbed(style[key])
if style[key] not in embedded_fontnames and fontList:
embedded_fontnames.append(style[key])
if not fontList:
if (fname, pos) in embedded_fontnames:
fontList = None
else:
fontList = findfonts.autoEmbed(fname)
if fontList:
embedded_fontnames.append((fname, pos))
if fontList:
self.embedded += fontList
# Maybe the font we got is not called
# the same as the one we gave so check that out
suff = ["", "-Bold", "-Oblique", "-BoldOblique"]
if not fontList[0].startswith(style[key]):
# We need to create font aliases, and use them
basefname = style[key].split('-')[0]
for fname, aliasname in zip(
fontList,
[basefname + suffix for suffix in suff],
):
self.fontsAlias[aliasname] = fname
style[key] = self.fontsAlias[basefname + suff[pos]]
else:
log.error(
"Unknown font: \"%s\"," "replacing with Helvetica",
style[key],
)
style[key] = "Helvetica"
# Get styles from all stylesheets in order
self.stylesheet = {}
self.styles = []
self.linkColor = 'navy'
# FIXME: linkColor should probably not be a global
# style, and tocColor should probably not
# be a special case, but for now I'm going
# with the flow...
self.tocColor = None
for data, ssname in ssdata:
self.linkColor = data.get('linkColor') or self.linkColor
self.tocColor = data.get('tocColor') or self.tocColor
for [skey, style] in self.stylepairs(data):
sdict = {}
# FIXME: this is done completely backwards
for key in style:
# Handle color references by name
if key == 'color' or key.endswith('Color') and style[key]:
style[key] = formatColor(style[key])
elif key == 'commands':
style[key] = validateCommands(style[key])
# Handle alignment constants
elif key == 'alignment':
style[key] = dict(
TA_LEFT=0,
LEFT=0,
TA_CENTER=1,
CENTER=1,
TA_CENTRE=1,
CENTRE=1,
TA_RIGHT=2,
RIGHT=2,
TA_JUSTIFY=4,
JUSTIFY=4,
DECIMAL=8,
)[style[key].upper()]
elif key == 'language':
if not style[key] in self.languages:
self.languages.append(style[key])
sdict[key] = style[key]
sdict['name'] = skey
# If the style already exists, update it
if skey in self.stylesheet:
self.stylesheet[skey].update(sdict)
else: # New style
self.stylesheet[skey] = sdict
self.styles.append(sdict)
# If the stylesheet has a style name docutils won't reach
# make a copy with a sanitized name.
# This may make name collisions possible but that should be
# rare (who would have custom_name and custom-name in the
# same stylesheet? ;-)
# Issue 339
styles2 = []
for s in self.styles:
if not re.match("^[a-z](-?[a-z0-9]+)*$", s['name']):
s2 = copy(s)
s2['name'] = docutils.nodes.make_id(s['name'])
log.warning(
'%s is an invalid docutils class name, adding alias %s'
% (s['name'], s2['name'])
)
styles2.append(s2)
self.styles.extend(styles2)
# And create reportlabs stylesheet
self.StyleSheet = StyleSheet1()
dirty = True
while dirty:
dirty = False
for s in self.styles:
if s['name'] in self.StyleSheet:
continue
try:
if 'parent' in s:
if s['parent'] is None:
if s['name'] != 'base':
s['parent'] = self.StyleSheet['base']
else:
del s['parent']
else:
s['parent'] = self.StyleSheet[s['parent']]
else:
if s['name'] != 'base':
s['parent'] = self.StyleSheet['base']
except KeyError:
dirty = True
continue
# If the style has no bulletFontName but it has a fontName, set it
if ('bulletFontName' not in s) and ('fontName' in s):
s['bulletFontName'] = s['fontName']
hasFS = True
# Adjust fontsize units
if 'fontSize' not in s:
s['fontSize'] = s['parent'].fontSize
s['trueFontSize'] = None
hasFS = False
elif 'parent' in s:
# This means you can set the fontSize to
# "2cm" or to "150%" which will be calculated
# relative to the parent style
s['fontSize'] = self.adjustUnits(
s['fontSize'], s['parent'].fontSize
)
s['trueFontSize'] = s['fontSize']
else:
# If s has no parent, it's base, which has
# an explicit point size by default and %
# makes no sense, but guess it as % of 10pt
s['fontSize'] = self.adjustUnits(s['fontSize'], 10)
# If the leading is not set, but the size is, set it
if 'leading' not in s and hasFS:
s['leading'] = 1.2 * s['fontSize']
# If the bullet font size is not set, set it as fontSize
if ('bulletFontSize' not in s) and ('fontSize' in s):
s['bulletFontSize'] = s['fontSize']
if 'spaceBefore' in s:
if isinstance(s['spaceBefore'], str) and s[
'spaceBefore'
].startswith('-'):
log.warning('A negative spaceBefore is the same as 0')
s['spaceBefore'] = self.adjustUnits(s['spaceBefore'])
if 'spaceAfter' in s:
if isinstance(s['spaceAfter'], str) and s['spaceAfter'].startswith(
'-'
):
log.warning('A negative spaceAfter is the same as 0')
s['spaceAfter'] = self.adjustUnits(s['spaceAfter'])
self.StyleSheet.add(ParagraphStyle(**s))
self.emsize = self['base'].fontSize
# Set the basefont, for Issue 65
reportlab.rl_config.canvas_basefontname = self['base'].fontName
# Set a default font for table cell styles (Issue 65)
reportlab.platypus.tables.CellStyle.fontname = self['base'].fontName
    def __getitem__(self, key):
        """Look up a style by name, creating a fallback alias if needed.

        The key is normalized to a valid docutils class name, then looked
        up directly, then with a ``pygments-`` prefix.  Unknown keys are
        aliased to ``code`` (for pygments-ish names) or ``normal`` and the
        alias is added to the stylesheet, so subsequent lookups hit it
        directly.  Note the side effect: a miss mutates ``self.StyleSheet``.
        """
        # This 'normalizes' the key.
        # For example, if the key is todo_node (like sphinx uses), it will be
        # converted to 'todo-node' which is a valid docutils class name.
        if not re.match("^[a-z](-?[a-z0-9]+)*$", key):
            key = docutils.nodes.make_id(key)
        if key in self.StyleSheet:
            return self.StyleSheet[key]
        else:
            # Sphinx 4 doesn't prepend "pygments-" to its style names when using pygments,
            # so look to see if we have a pygments style for this style name
            pygments_key = "pygments-" + key
            if pygments_key in self.StyleSheet:
                return self.StyleSheet[pygments_key]
            else:
                if key.startswith('pygments'):
                    # Unknown syntax-highlighting style: behave like 'code'.
                    if not self.suppress_undefined_style_warning:
                        log.info(
                            "Using undefined style '%s'"
                            ", aliased to style 'code'." % key
                        )
                    newst = copy(self.StyleSheet['code'])
                else:
                    # Any other unknown style: behave like 'normal'.
                    if not self.suppress_undefined_style_warning:
                        log.warning(
                            "Using undefined style '%s'"
                            ", aliased to style 'normal'." % key
                        )
                    newst = copy(self.StyleSheet['normal'])
                # Register the alias so the warning fires only once per key.
                newst.name = key
                self.StyleSheet.add(newst)
                return newst
    def readSheets(self, flist):
        """Read in the stylesheets. Return a list of
        (sheetdata, sheetname) tuples.
        Orders included sheets in front
        of including sheets.

        Works as an explicit stack: when a sheet declares
        ``options.stylesheets``, the sheet itself is pushed back and its
        includes are pushed on top of it, so the includes are emitted
        (and can therefore be overridden) before the including sheet.
        Sheets are parsed only once; repeats reuse the cached data.
        """
        # Process from end of flist
        flist.reverse()
        # Keep previously seen sheets in sheetdict
        sheetdict = {}
        result = []
        while flist:
            ssname = flist.pop()
            data = sheetdict.get(ssname)
            if data is None:
                # First time we see this sheet: parse it.
                data = self.readStyle(ssname)
                if data is None:
                    # Unreadable/unparsable sheet: skip it entirely.
                    continue
                sheetdict[ssname] = data
                if 'options' in data and 'stylesheets' in data['options']:
                    # Re-queue this sheet *below* its includes; it will be
                    # appended to result on its second visit (cache hit).
                    flist.append(ssname)
                    newsheets = list(data['options']['stylesheets'])
                    newsheets.reverse()
                    flist.extend(newsheets)
                    continue
            result.append((data, ssname))
        return result
def readStyle(self, ssname):
# If callables are used, they should probably be subclassed
# strings, or something else that will print nicely for errors
if callable(ssname):
return ssname()
fname = self.findStyle(ssname)
if fname:
try:
# TODO no longer needed when we drop support for rson
# Is it an older rson/json stylesheet with .style extension?
root_ext = os.path.splitext(fname)
if root_ext[1] == ".style":
log.warning(
'Stylesheet "%s" in outdated format, recommend converting to YAML'
% (fname)
)
return rson_loads(open(fname).read())
# Otherwise assume yaml/yml
return yaml.safe_load(open(fname).read())
except ValueError as e: # Error parsing the JSON data
log.critical('Error parsing stylesheet "%s": %s' % (fname, str(e)))
except IOError as e: # Error opening the ssheet
log.critical('Error opening stylesheet "%s": %s' % (fname, str(e)))
def findStyle(self, fn):
"""Find the absolute file name for a given style filename.
Given a style filename, searches for it in StyleSearchPath
and returns the real file name.
"""
def innerFind(path, fn):
if os.path.isabs(fn):
if os.path.isfile(fn):
return fn
else:
for D in path:
tfn = os.path.join(D, fn)
if os.path.isfile(tfn):
return tfn
return None
for ext in ['', '.yaml', '.yml', '.style', '.json']:
result = innerFind(self.StyleSearchPath, fn + ext)
if result:
break
if result is None:
log.warning("Can't find stylesheet %s" % fn)
return result
def findFont(self, fn):
"""Find the absolute font name for a given font filename.
Given a font filename, searches for it in FontSearchPath
and returns the real file name.
"""
if not os.path.isabs(fn):
for D in self.FontSearchPath:
tfn = os.path.join(D, fn)
if os.path.isfile(tfn):
return str(tfn)
return str(fn)
def styleForNode(self, node):
"""Return the right default style for any kind of node.
That usually means "bodytext", but for sidebars, for
example, it's sidebar.
"""
n = docutils.nodes
styles = {
n.sidebar: 'sidebar',
n.figure: 'figure',
n.tgroup: 'table',
n.table: 'table',
n.Admonition: 'admonition',
}
return self[styles.get(node.__class__, 'bodytext')]
def tstyleHead(self, rows=1):
"""Return a table style spec for a table header of `rows`.
The style will be based on the table-heading style from the stylesheet.
"""
# This alignment thing is exactly backwards from
# the alignment for paragraphstyles
alignment = {0: 'LEFT', 1: 'CENTER', 2: 'RIGHT', 4: 'JUSTIFY', 8: 'DECIMAL'}[
self['table-heading'].alignment
]
return [
(
'BACKGROUND',
(0, 0),
(-1, rows - 1),
self['table-heading'].backColor,
),
('ALIGN', (0, 0), (-1, rows - 1), alignment),
(
'TEXTCOLOR',
(0, 0),
(-1, rows - 1),
self['table-heading'].textColor,
),
(
'FONT',
(0, 0),
(-1, rows - 1),
self['table-heading'].fontName,
self['table-heading'].fontSize,
self['table-heading'].leading,
),
('VALIGN', (0, 0), (-1, rows - 1), self['table-heading'].valign),
]
def adjustUnits(self, v, total=None, default_unit='pt'):
if total is None:
total = self.tw
return adjustUnits(v, total, self.def_dpi, default_unit, emsize=self.emsize)
def combinedStyle(self, styles):
"""Given a list of style names, it merges them (the existing ones)
and returns a new style.
The styles that don't exist are silently ignored.
For example, if called with styles=['style1','style2'] the returned
style will be called 'merged_style1_style2'.
The styles that are *later* in the list will have priority.
"""
validst = [x for x in styles if x in self.StyleSheet]
newname = '_'.join(['merged'] + validst)
validst = [self[x] for x in validst]
newst = copy(validst[0])
for st in validst[1:]:
newst.__dict__.update(st.__dict__)
newst.name = newname
return newst
def adjustUnits(v, total=None, dpi=300, default_unit='pt', emsize=10):
    """Takes something like 2cm and returns 2*cm.

    If you use % as a unit, it returns the percentage of "total".
    If total is not given, returns a percentage of the page width.
    However, if you get to that stage, you are doing it wrong.

    Returns None for empty input; unknown units are logged and the bare
    number is returned (i.e. treated as points).

    Example::

        >>> adjustUnits('50%', 200)
        100.0
    """
    if v is None or v == "":
        return None
    v = str(v)
    # Split into ['', '<number>', '<unit>']; the unit element is absent
    # (or empty) when the value is a bare number.
    length = re.split(r'(-?[0-9.]+)', v)
    n = length[1]
    u = default_unit
    if len(length) == 3 and length[2]:
        u = length[2]
    if u in units.__dict__:
        # A reportlab unit name (cm, mm, inch, ...): scale by its factor.
        return float(n) * units.__dict__[u]
    else:
        if u == '%':
            return float(n) * total / 100
        elif u == 'px':
            # Pixels convert to points through the given print resolution.
            return float(n) * units.inch / dpi
        elif u == 'pt':
            return float(n)
        elif u == 'in':
            return float(n) * units.inch
        elif u == 'em':
            return float(n) * emsize
        elif u == 'ex':
            # Approximation: one ex is taken as half an em.
            return float(n) * emsize / 2
        elif u == 'pc':  # picas!
            return float(n) * 12
    log.error('Unknown unit "%s"' % u)
    return float(n)
def formatColor(value, numeric=True):
    """Convert a color like "gray" or "0xf" or "ffff"
    to something ReportLab will like.

    Named colors known to reportlab.lib.colors are returned directly.
    Anything else is treated as a hex triplet (optionally prefixed with
    '#', optionally followed by an alpha byte).  When *numeric* is
    false, the normalized "#rrggbb..." string is returned instead of a
    Color instance.
    """
    if value in colors.__dict__:
        return colors.__dict__[value]
    # Hopefully a hex color: drop a single leading '#' and left-pad
    # short values with zeros up to the full six digits.
    digits = value.strip()
    if digits[0] == '#':
        digits = digits[1:]
    digits = digits.rjust(6, '0')
    if not numeric:
        return str("#" + digits)
    r, g, b = (int(digits[i:i + 2], 16) / 255.0 for i in (0, 2, 4))
    if len(digits) >= 8:
        # Two extra digits supply an alpha channel.
        return colors.Color(r, g, b, alpha=int(digits[6:8], 16) / 255.0)
    return colors.Color(r, g, b)
# Argument specification for reportlab table commands, used by
# validateCommands() below.  The values are:
# * Minimum number of arguments
# * Maximum number of arguments
# * Valid types of arguments.
#
# For example, if option FOO takes a list, a string and a number,
# but the number is optional:
#
# "FOO":(2,3,"list","string","number")
#
# The reportlab command could look like
#
# ["FOO",(0,0),(-1,-1),[1,2],"whatever",4]
#
# The (0,0) (-1,-1) are start and stop cells and are mandatory
# (they are not counted in the min/max above).
#
# Possible types of arguments are string, number, color, colorlist
validCommands = {
    # Cell format commands
    "FONT": (1, 3, "string", "number", "number"),
    "FONTNAME": (1, 1, "string"),
    "FACE": (1, 1, "string"),
    "FONTSIZE": (1, 1, "number"),
    "SIZE": (1, 1, "number"),
    "LEADING": (1, 1, "number"),
    "TEXTCOLOR": (1, 1, "color"),
    "ALIGNMENT": (1, 1, "string"),
    "ALIGN": (1, 1, "string"),
    "LEFTPADDING": (1, 1, "number"),
    "RIGHTPADDING": (1, 1, "number"),
    "TOPPADDING": (1, 1, "number"),
    "BOTTOMPADDING": (1, 1, "number"),
    "BACKGROUND": (1, 1, "color"),
    "ROWBACKGROUNDS": (1, 1, "colorlist"),
    "COLBACKGROUNDS": (1, 1, "colorlist"),
    "VALIGN": (1, 1, "string"),
    # Line commands
    "GRID": (2, 2, "number", "color"),
    "BOX": (2, 2, "number", "color"),
    "OUTLINE": (2, 2, "number", "color"),
    "INNERGRID": (2, 2, "number", "color"),
    "LINEBELOW": (2, 2, "number", "color"),
    "LINEABOVE": (2, 2, "number", "color"),
    "LINEBEFORE": (2, 2, "number", "color"),
    "LINEAFTER": (2, 2, "number", "color"),
    # You should NOT have span commands, man!
    # "SPAN":(,,),
}
def validateCommands(commands):
    """Given a list of reportlab's table commands, it fixes some common errors
    and/or removes commands that can't be fixed.

    Each command has the form ``[NAME, start, stop, arg, ...]`` where
    start/stop are cell coordinates.  Commands are checked against
    ``validCommands``; invalid ones are dropped with an error logged, and
    string colors are converted to Color objects in place.  Returns the
    list of surviving commands.
    """
    fixed = []
    for command in commands:
        # Command names are matched case-insensitively.
        command[0] = command[0].upper()
        flag = False
        # See if the command is valid
        if command[0] not in validCommands:
            log.error('Unknown table command %s in stylesheet', command[0])
            continue
        spec = validCommands[command[0]]
        # See if start and stop are the right types
        if not isinstance(command[1], (list, tuple)):
            log.error(
                'Start cell in table command should be list or tuple, got %s [%s]',
                type(command[1]),
                command[1],
            )
            flag = True
        if not isinstance(command[2], (list, tuple)):
            # Bug fix: this message used to report the *start* cell
            # (command[1]) instead of the offending stop cell.
            log.error(
                'Stop cell in table command should be list or tuple, got %s [%s]',
                type(command[2]),
                command[2],
            )
            flag = True
        # See if the number of arguments is right
        length = len(command) - 3
        if length > spec[1]:
            log.error('Too many arguments in table command: %s', command)
            flag = True
        if length < spec[0]:
            log.error('Too few arguments in table command: %s', command)
            flag = True
        # Validate argument types.  Only inspect as many arguments as the
        # spec declares: extra arguments were already flagged above, and
        # indexing the spec past its end used to raise IndexError.
        for pos, arg in enumerate(command[3:3 + len(spec) - 2]):
            typ = spec[pos + 2]
            if typ == "color":
                # Convert all 'string' colors to numeric
                command[3 + pos] = formatColor(arg)
            elif typ == "colorlist":
                command[3 + pos] = [formatColor(c) for c in arg]
            elif typ == "number":
                pass
            elif typ == "string":
                command[3 + pos] = arg
            else:
                log.error("This should never happen: wrong type %s", typ)
        if not flag:
            fixed.append(command)
    return fixed
class CallableStyleSheet(str):
    """Useful for programmatically generated stylesheets.
    A generated stylesheet is a callable string (name),
    which returns the pre-digested stylesheet data
    when called.

    The string value itself is the sheet *name* (so it prints nicely in
    errors and can be used as a dict key); the raw stylesheet text is
    kept in ``self.value`` and parsed lazily on call.
    """
    def __new__(cls, name, value=''):
        # str is immutable, so the name must be set in __new__.
        self = str.__new__(cls, name)
        self.value = value
        return self
    def __call__(self):
        # Parse the stored rson/json text into stylesheet data.
        return rson_loads(self.value)
| mit | dc542aa8134531e4a5e0cdec50f7e7a3 | 36.99893 | 93 | 0.473501 | 4.475249 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/liblzma/__init__.py | 1 | 2557 | import sh
from multiprocessing import cpu_count
from os.path import exists, join
from pythonforandroid.archs import Arch
from pythonforandroid.logger import shprint
from pythonforandroid.recipe import Recipe
from pythonforandroid.util import current_directory
class LibLzmaRecipe(Recipe):
    """Builds liblzma (the XZ Utils compression library) for Android.

    Runs the standard autotools configure/make/install cycle inside the
    per-arch build directory, installing into a local ``p4a_install``
    prefix so only the shared library is produced.
    """
    version = '5.2.4'
    url = 'https://tukaani.org/xz/xz-{version}.tar.gz'
    # Library name -> directory (relative to the build dir) it ends up in.
    built_libraries = {'liblzma.so': 'p4a_install/lib'}
    def build_arch(self, arch: Arch) -> None:
        """Configure and build liblzma for a single target arch."""
        env = self.get_recipe_env(arch)
        install_dir = join(self.get_build_dir(arch.arch), 'p4a_install')
        with current_directory(self.get_build_dir(arch.arch)):
            # Generate the configure script on a fresh checkout only.
            if not exists('configure'):
                shprint(sh.Command('./autogen.sh'), _env=env)
                shprint(sh.Command('autoreconf'), '-vif', _env=env)
            # Cross-compile just the shared library; all the xz command
            # line tools and docs are disabled.
            shprint(sh.Command('./configure'),
                    '--host=' + arch.command_prefix,
                    '--prefix=' + install_dir,
                    '--disable-builddir',
                    '--disable-static',
                    '--enable-shared',
                    '--disable-xz',
                    '--disable-xzdec',
                    '--disable-lzmadec',
                    '--disable-lzmainfo',
                    '--disable-scripts',
                    '--disable-doc',
                    _env=env)
            shprint(
                sh.make, '-j', str(cpu_count()),
                _env=env
            )
            shprint(sh.make, 'install', _env=env)
    def get_library_includes(self, arch: Arch) -> str:
        """
        Returns a string with the appropriate `-I<lib directory>` to link
        with the lzma lib. This string is usually added to the environment
        variable `CPPFLAGS`.
        """
        return " -I" + join(
            self.get_build_dir(arch.arch), 'p4a_install', 'include',
        )
    def get_library_ldflags(self, arch: Arch) -> str:
        """
        Returns a string with the appropriate `-L<lib directory>` to link
        with the lzma lib. This string is usually added to the environment
        variable `LDFLAGS`.
        """
        return " -L" + join(
            self.get_build_dir(arch.arch), self.built_libraries['liblzma.so'],
        )
    @staticmethod
    def get_library_libs_flag() -> str:
        """
        Returns a string with the appropriate `-l<lib>` flags to link with
        the lzma lib. This string is usually added to the environment
        variable `LIBS`.
        """
        return " -llzma"
recipe = LibLzmaRecipe()
| mit | d77da9432581d8dae0e843dfa71ae368 | 32.207792 | 78 | 0.546734 | 3.995313 | false | false | false | false |
rst2pdf/rst2pdf | rst2pdf/image.py | 1 | 15554 | # -*- coding: utf-8 -*-
from copy import copy
import glob
import os
from os.path import abspath, dirname
import sys
from urllib.request import urlretrieve
from PIL import Image as PILImage
from reportlab.platypus.flowables import Image, Flowable
from reportlab.lib.units import cm, inch
from .log import log, nodeid
try:
from .svgimage import SVGImage
except ImportError:
# svglib may optionally not be installed, which causes this error
SVGImage = None
# find base path
if hasattr(sys, 'frozen'):
    # Frozen builds (e.g. PyInstaller/py2exe): bundled resources live
    # next to the executable rather than next to this source file.
    PATH = abspath(dirname(sys.executable))
else:
    PATH = abspath(dirname(__file__))
# Placeholder image used whenever a requested image is missing or unreadable.
missing = os.path.join(PATH, 'images', 'image-missing.jpg')
def defaultimage(
    filename,
    width=None,
    height=None,
    kind='direct',
    mask='auto',
    lazy=1,
    srcinfo=None,
):
    """Get default image backend.
    We have multiple image backends, including the stock ReportLab one. This
    wrapper around the ReportLab one allows us to pass the client ``RstToPdf``
    object and the URI into all our backends, which they can use (or not) as
    necessary.

    This particular backend ignores ``srcinfo``; the parameter exists only
    so all backends share the same call signature.
    """
    return Image(filename, width, height, kind, mask, lazy)
class MyImage(Flowable):
"""A Image subclass that can:
1. Take a ``percentage_of_container`` kind, which resizes it on ``wrap(``
to use a percentage of the container's width.
2. Take vector formats and instantiates the right "backend" flowable.
"""
    # Class-level latch so the limited-image-support warning is emitted
    # at most once per process.
    warned = False
    @classmethod
    def support_warning(cls):
        """Warn (once) that non-JPG images need Pillow installed."""
        # NOTE(review): PILImage is imported unconditionally at module top
        # here, so this guard only fires if that import is made optional
        # elsewhere — confirm before relying on this warning.
        if cls.warned or PILImage:
            return
        cls.warned = True
        log.warning(
            'Support for images other than JPG is now limited. Please install '
            'Pillow.'
        )
@staticmethod
def split_uri(uri):
"""Split provided URI.
A really minimalistic split -- doesn't cope with http:, etc. HOWEVER,
it tries to do so in a fashion that allows a clueless user to have
``#`` inside his filename without screwing anything up.
"""
basename, extra = os.path.splitext(uri)
extra = extra.split('#', 1) + ['']
fname = basename + extra[0]
extension = extra[0][1:].lower()
options = extra[1]
return fname, extension, options
    def __init__(
        self,
        filename,
        width=None,
        height=None,
        kind='direct',
        mask='auto',
        lazy=1,
        client=None,
        target=None,  # optional hyperlink URL painted over the image in drawOn()
    ):
        """Wrap the appropriate image backend for ``filename``.

        Remote URIs are downloaded to a temp file first; the temp file is
        queued on ``client.to_unlink`` for later cleanup.
        """
        # Client is mandatory. Perhaps move it farther up if we refactor
        assert client is not None
        self.__kind = kind
        # Maximum page height is used when resizing in wrap() as we can't render an image across a page boundary
        self.max_page_height = 999999
        if filename.split("://")[0].lower() in ('http', 'ftp', 'https'):
            try:
                filename2, _ = urlretrieve(filename)
                if filename != filename2:
                    client.to_unlink.append(filename2)
                filename = filename2
            except IOError:
                # Download failed: fall back to the placeholder image.
                filename = missing
        self.filename, self._backend = self.get_backend(filename, client)
        srcinfo = client, self.filename
        if kind == 'percentage_of_container':
            # Store the requested percentage; the real size is computed in
            # wrap() once the container width is known.
            self.image = self._backend(
                self.filename, width, height, 'direct', mask, lazy, srcinfo
            )
            self.image.drawWidth = width
            self.image.drawHeight = height
            self.__width = width
            self.__height = height
        else:
            self.image = self._backend(
                self.filename, width, height, kind, mask, lazy, srcinfo
            )
        # Aspect ratio (w/h), used to keep proportions when rescaling.
        self.__ratio = float(self.image.imageWidth) / self.image.imageHeight
        self.__wrappedonce = False
        self.target = target
    @classmethod
    def raster(self, filename, client):
        """Convert image to raster image.
        Takes a filename and converts it to a raster image reportlab can
        process.

        Falls back through: suggested backend -> PIL readability check ->
        the 'missing' placeholder.  Never raises; always returns a path.
        """
        # NOTE(review): classmethod first parameter is conventionally
        # named ``cls``; kept as ``self`` to leave the code untouched.
        if not os.path.exists(filename):
            log.error('Missing image file: %s', filename)
            return missing
        try:
            # First try to rasterize using the suggested backend
            backend = self.get_backend(filename, client)[1]
            return backend.raster(filename, client)
        except Exception:
            pass
        # Last resort: try everything
        if PILImage:  # See if pil can process it
            try:
                PILImage.open(filename)
                return filename
            except Exception:
                # Can't read it
                pass
        # PIL can't, so we can't
        self.support_warning()
        log.error('Could not load image: %s', filename)
        return missing
@classmethod
def get_backend(self, uri, client):
"""Get backend for an image.
Given the filename of an image, returns ``(fname, backend)``, where
``fname`` is the filename to be used (could be the same as filename, or
something different if the image had to be converted or is missing),
and ``backend`` is an ``Image`` class that can handle ``fname``.
If ``uri`` ends with '.*' then the returned filename will be the best
quality supported at the moment.
That means: SVG > PNG > JPG > GIF
"""
backend = defaultimage
# Extract all the information from the URI
filename, extension, options = self.split_uri(uri)
if '*' in filename:
preferred = ['gif', 'jpg', 'png', 'svg']
# Find out what images are available
available = glob.glob(filename)
cfn = available[0]
cv = -10
for fn in available:
ext = fn.split('.')[-1]
if ext in preferred:
v = preferred.index(ext)
else:
v = -1
if v > cv:
cv = v
cfn = fn
# cfn should have our favourite type of those available
filename = cfn
extension = cfn.split('.')[-1]
uri = filename
# If the image doesn't exist, we use a 'missing' image
if not os.path.exists(filename):
log.error('Missing image file: %s', filename)
filename = missing
return filename, backend
if extension in ['svg', 'svgz']:
if SVGImage is not None:
log.info('Backend for %s is SVGImage', filename)
backend = SVGImage
else:
log.error('SVG image support requires svglib: %s', filename)
filename = missing
elif extension in ['pdf']:
log.error("PDF images are not supported")
filename = missing
elif extension != 'jpg' and not PILImage:
# No way to make this work
log.error(
'%s image support requires Pillow: %s',
extension.upper(),
filename,
)
filename = missing
return filename, backend
    @classmethod
    def size_for_node(self, node, client):
        """Get size for image node.

        Given a docutils image node, returns the size the image should have in
        the PDF document, and what "kind" of size that is. That involves lots
        of guesswork.

        Returns ``(w, h, kind)`` where w/h are in points (or a bare
        percentage number when kind is 'percentage_of_container').
        """
        uri = str(node.get('uri'))
        if uri.split('://')[0].lower() not in ('http', 'ftp', 'https'):
            uri = os.path.join(client.basedir, uri)
        else:
            # Remote image: fetch it and schedule the temp file for cleanup.
            uri, _ = urlretrieve(uri)
            client.to_unlink.append(uri)
        srcinfo = client, uri
        # Extract all the information from the URI
        imgname, extension, options = self.split_uri(uri)
        if not os.path.isfile(imgname):
            imgname = missing
        scale = float(node.get('scale', 100)) / 100
        # Figuring out the size to display of an image is ... annoying.
        # If the user provides a size with a unit, it's simple, adjustUnits
        # will return it in points and we're done.
        # However, often the unit wil be "%" (specially if it's meant for
        # HTML originally. In which case, we will use a percentage of
        # the containing frame.
        # Find the image size in pixels:
        kind = 'direct'
        xdpi, ydpi = client.styles.def_dpi, client.styles.def_dpi
        extension = imgname.split('.')[-1].lower()
        if extension in ['svg', 'svgz']:
            if not SVGImage:
                raise RuntimeError(
                    'Documentation uses SVG image but svglib is not installed.'
                )
            # SVGImage.wrap reports size in points; convert back to pixels.
            iw, ih = SVGImage(imgname, srcinfo=srcinfo).wrap(0, 0)
            # These are in pt, so convert to px
            iw = iw * xdpi / 72
            ih = ih * ydpi / 72
        else:
            keeptrying = True
            if PILImage:
                try:
                    img = PILImage.open(imgname)
                    img.load()
                    iw, ih = img.size
                    # Prefer the DPI embedded in the image, if any.
                    xdpi, ydpi = img.info.get('dpi', (xdpi, ydpi))
                    keeptrying = False
                except IOError:  # PIL throws this when it's a broken/unknown image
                    pass
            if keeptrying:
                if extension not in ['jpg', 'jpeg']:
                    log.error(
                        "The image (%s, %s) is broken or in an unknown format",
                        imgname,
                        nodeid(node),
                    )
                    raise ValueError
                else:
                    # Can be handled by reportlab
                    log.warning(
                        "Can't figure out size of the image (%s, %s). Install PIL for better results.",
                        imgname,
                        nodeid(node),
                    )
                    iw = 1000
                    ih = 1000
        # Try to get the print resolution from the image itself via PIL.
        # If it fails, assume a DPI of 300, which is pretty much made up,
        # and then a 100% size would be iw*inch/300, so we pass
        # that as the second parameter to adjustUnits
        #
        # Some say the default DPI should be 72. That would mean
        # the largest printable image in A4 paper would be something
        # like 480x640. That would be awful.
        #
        w = node.get('width')
        h = node.get('height')
        if h is None and w is None:  # Nothing specified
            # Guess from iw, ih
            log.debug(
                "Using image %s without specifying size."
                "Calculating based on image size at %ddpi [%s]",
                imgname,
                xdpi,
                nodeid(node),
            )
            w = iw * inch / xdpi
            h = ih * inch / ydpi
        elif w is not None:
            # Node specifies only w
            # In this particular case, we want the default unit
            # to be pixels so we work like rst2html
            if w[-1] == '%':
                kind = 'percentage_of_container'
                w = int(w[:-1])
            else:
                # This uses default DPI setting because we
                # are not using the image's "natural size"
                # this is what LaTeX does, according to the
                # docutils mailing list discussion
                w = client.styles.adjustUnits(w, client.styles.tw, default_unit='px')
            if h is None:
                # h is set from w with right aspect ratio
                h = w * ih / iw
            else:
                h = client.styles.adjustUnits(h, ih * inch / ydpi, default_unit='px')
        elif h is not None and w is None:
            if h[-1] != '%':
                h = client.styles.adjustUnits(h, ih * inch / ydpi, default_unit='px')
                # w is set from h with right aspect ratio
                w = h * iw / ih
            else:
                log.error(
                    'Setting height as a percentage does **not** work. '
                    'ignoring height parameter [%s]',
                    nodeid(node),
                )
                # Set both from image data
                w = iw * inch / xdpi
                h = ih * inch / ydpi
        # Apply scale factor
        w = w * scale
        h = h * scale
        # And now we have this probably completely bogus size!
        log.info(
            'Image %s size calculated: %fcm by %fcm [%s]',
            imgname,
            w / cm,
            h / cm,
            nodeid(node),
        )
        return w, h, kind
    def _restrictSize(self, aW, aH):
        # Delegate size restriction to the wrapped backend flowable.
        return self.image._restrictSize(aW, aH)
    def _unRestrictSize(self, aW, aH):
        # Delegate undoing the size restriction to the wrapped backend flowable.
        return self.image._unRestrictSize(aW, aH)
    def __deepcopy__(self, *whatever):
        """Deliberately degrade deepcopy to a shallow copy."""
        # ImageCore class is not deep copyable. Stop the copy at this
        # class. If you remove this, re-test for issue #126.
        return copy(self)
    def wrap(self, availWidth, availHeight):
        """Compute the image's draw size for the given frame space.

        Percentage-kind images are sized as a fraction of the container
        width; direct images that don't fit are rescaled (preserving the
        aspect ratio) with a warning.
        """
        if self.max_page_height < availHeight:
            # We can't render an image across a page boundary, so this is the tallest we can be
            availHeight = self.max_page_height
        if self.__kind == 'percentage_of_container':
            w, h = self.__width, self.__height
            if not w:
                log.warning(
                    'Scaling image as % of container with w unset. '
                    'This should not happen, setting to 100'
                )
                w = 100
            scale = w / 100.0
            w = availWidth * scale
            # Height follows from the stored aspect ratio.
            h = w / self.__ratio
            self.image.drawWidth, self.image.drawHeight = w, h
            return w, h
        else:
            if self.image.drawHeight > availHeight:
                if not getattr(self, '_atTop', True):
                    # Not at the top of a frame: let the caller push us to
                    # the next frame instead of rescaling.
                    return self.image.wrap(availWidth, availHeight)
                else:
                    # It's the first thing in the frame, probably
                    # Wrapping it will not make it work, so we
                    # adjust by height
                    # FIXME get rst file info (line number)
                    # here for better error message
                    log.warning(
                        'Image %s is too tall for the frame, rescaling', self.filename
                    )
                    self.image.drawHeight = availHeight
                    self.image.drawWidth = availHeight * self.__ratio
            elif self.image.drawWidth > availWidth:
                log.warning(
                    'Image %s is too wide for the frame, rescaling', self.filename
                )
                self.image.drawWidth = availWidth
                self.image.drawHeight = availWidth / self.__ratio
            return self.image.wrap(availWidth, availHeight)
def drawOn(self, canv, x, y, _sW=0):
if self.target:
offset = 0
if self.image.hAlign == 'CENTER':
offset = _sW / 2.0
elif self.image.hAlign == 'RIGHT':
offset = _sW
canv.linkURL(
self.target,
(
x + offset,
y,
x + offset + self.image.drawWidth,
y + self.image.drawHeight,
),
relative=True,
# thickness = 3,
)
return self.image.drawOn(canv, x, y, _sW)
| mit | 3c0365aae9022ae91e1f2cba042ebcc5 | 33.184615 | 112 | 0.520381 | 4.428815 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/ifaddrs/__init__.py | 5 | 2114 | """ ifaddrs for Android
"""
from os.path import join, exists
import sh
from pythonforandroid.logger import info, shprint
from pythonforandroid.recipe import CompiledComponentsPythonRecipe
from pythonforandroid.toolchain import current_directory
class IFAddrRecipe(CompiledComponentsPythonRecipe):
    """Recipe that compiles android-ifaddrs into libifaddrs.so."""

    version = '8f9a87c'
    url = 'https://github.com/morristech/android-ifaddrs/archive/{version}.zip'
    depends = ['hostpython3']
    call_hostpython_via_targetpython = False
    site_packages_name = 'ifaddrs'
    generated_libraries = ['libifaddrs.so']

    def prebuild_arch(self, arch):
        """Create the arch build directory if it is not there yet."""
        build_dir = self.get_build_dir(arch.arch)
        if not exists(build_dir):
            info("creating {}".format(build_dir))
            shprint(sh.mkdir, '-p', build_dir)

    def build_arch(self, arch):
        """Compile ifaddrs.c into a shared library and copy it into place."""
        env = self.get_recipe_env(arch, with_flags_in_cc=False)
        python_build_dir = self.ctx.python_recipe.get_build_dir(arch.arch)
        for needed_dir in (
                self.get_build_dir(arch.arch),
                join(python_build_dir, 'Lib'),
                join(python_build_dir, 'Include')):
            if not exists(needed_dir):
                info("creating {}".format(needed_dir))
                shprint(sh.mkdir, '-p', needed_dir)
        compiler_path = env['CC'].split()[0]
        # makes sure first CC command is the compiler rather than ccache, refs:
        # https://github.com/kivy/python-for-android/issues/1398
        if 'ccache' in compiler_path:
            compiler_path = env['CC'].split()[1]
        compiler = sh.Command(compiler_path)
        with current_directory(self.get_build_dir(arch.arch)):
            compile_args = env['CFLAGS'].split() + [
                '-I.', '-c', '-l.', 'ifaddrs.c', '-I.']
            shprint(compiler, *compile_args, _env=env)
            link_args = env['CFLAGS'].split() + [
                '-shared', '-I.', 'ifaddrs.o', '-o', 'libifaddrs.so']
            link_args += env['LDFLAGS'].split()
            shprint(compiler, *link_args, _env=env)
            shprint(sh.cp, 'libifaddrs.so', self.ctx.get_libs_dir(arch.arch))


recipe = IFAddrRecipe()
| mit | a5094fdd7b9b512b6e1ce4d87de21262 | 38.148148 | 82 | 0.599811 | 3.626072 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/gevent/__init__.py | 5 | 1567 | import re
from pythonforandroid.logger import info
from pythonforandroid.recipe import CythonRecipe
class GeventRecipe(CythonRecipe):
    """Recipe for building gevent, rearranging compiler/linker flags first."""

    version = '1.4.0'
    url = 'https://pypi.python.org/packages/source/g/gevent/gevent-{version}.tar.gz'
    depends = ['librt', 'setuptools']
    patches = ["cross_compiling.patch"]

    def get_recipe_env(self, arch=None, with_flags_in_cc=True):
        """Return a build env with flags in the variables autotools expects.

        - Every ``-I<inc>``/``-D<macro>`` is moved from CFLAGS to CPPFLAGS.
        - Every ``-l<lib>`` is moved from LDFLAGS to LIBS.
        - Every ``-l<lib>`` from LDLIBS is copied into LIBS as well.
        """
        env = super().get_recipe_env(arch, with_flags_in_cc)
        # CFLAGS may only be used to specify C compiler flags, for macro definitions use CPPFLAGS
        macro_or_include = re.compile(r'(?:\s|^)-[DI][\S]+')
        env['CPPFLAGS'] = ''.join(macro_or_include.findall(env['CFLAGS'])).strip()
        env['CFLAGS'] = macro_or_include.sub('', env['CFLAGS'])
        info('Moved "{}" from CFLAGS to CPPFLAGS.'.format(env['CPPFLAGS']))
        # LDFLAGS may only be used to specify linker flags, for libraries use LIBS
        lib_flag = re.compile(r'(?:\s|^)-l[\w\.]+')
        env['LIBS'] = ''.join(lib_flag.findall(env['LDFLAGS'])).strip()
        env['LIBS'] += ' {}'.format(''.join(lib_flag.findall(env['LDLIBS'])).strip())
        env['LDFLAGS'] = lib_flag.sub('', env['LDFLAGS'])
        info('Moved "{}" from LDFLAGS to LIBS.'.format(env['LIBS']))
        return env


recipe = GeventRecipe()
| mit | 7d7935091670f251f11a3dba756db03a | 45.088235 | 97 | 0.613274 | 3.482222 | false | false | false | false |
rst2pdf/rst2pdf | rst2pdf/tests/input/sphinx-issue285/conf.py | 1 | 1163 | # -*- coding: utf-8 -*-
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['rst2pdf.pdfbuilder']
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Issue 285'
copyright = u'2010, Roberto Alsina'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0'
# The full version, including alpha/beta/rc tags.
release = '0.0'
# -- Options for PDF output ----------------------------------------------------
# These pdf_* settings are consumed by the rst2pdf.pdfbuilder extension
# enabled in `extensions` above.
# Grouping the document tree into PDF files. List of tuples
# (source start file, target name, title, author).
pdf_documents = [('index', 'Issue285', u'Issue 285 Documentation', u'Roberto Alsina')]
pdf_break_level = 3
pdf_verbosity = 0
# pdf_invariant: presumably makes output byte-reproducible for test
# comparison — confirm against the pdfbuilder docs.
pdf_invariant = True
pdf_real_footnotes = True
# Set a consistent date for the cover page
today = 'April 29, 2018' | mit | ca3f861dbb126505f873ffb9dbb91e3a | 29.631579 | 86 | 0.656922 | 3.775974 | false | false | false | false |
rst2pdf/rst2pdf | rst2pdf/createpdf.py | 1 | 65875 | # -*- coding: utf-8 -*-
# See LICENSE.txt for licensing terms
# Some fragments of code are copied from Reportlab under this license:
#
#####################################################################################
#
# Copyright (c) 2000-2008, ReportLab Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the company nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE OFFICERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
#####################################################################################
__docformat__ = 'reStructuredText'
from importlib import import_module
import sys
import os
import re
import logging
from urllib.parse import urlunparse
from os.path import abspath, dirname, expanduser, join
from copy import copy, deepcopy
from optparse import OptionParser
import docutils.readers.doctree
import docutils.core
import docutils.nodes
from docutils.parsers.rst import directives
from docutils.readers import standalone
from docutils.transforms import Transform
try:
from roman import toRoman
except ImportError:
from docutils.utils.roman import toRoman
import reportlab
from reportlab.lib.units import cm
from reportlab.platypus import doctemplate
from reportlab.platypus.doctemplate import (
ActionFlowable,
BaseDocTemplate,
FrameActionFlowable,
IndexingFlowable,
LayoutError,
PageTemplate,
)
from reportlab.platypus.flowables import (
_listWrapOn,
_Container,
Flowable,
ImageAndFlowables,
PageBreak,
SlowPageBreak,
)
from reportlab.platypus.paragraph import Paragraph
from reportlab.platypus.tables import TableStyle
from . import config
from rst2pdf.directives import code_block
from rst2pdf import flowables
from rst2pdf.flowables import (
BoundByWidth,
DelayedTable,
Heading,
MyPageBreak,
MySpacer,
OddEven,
Separation,
SmartFrame,
XXPreformatted,
)
from rst2pdf.sinker import Sinker
from rst2pdf.image import MyImage, missing
from rst2pdf.log import log, nodeid
from smartypants import smartypants
from rst2pdf import styles as sty
from rst2pdf.nodehandlers import nodehandlers
from rst2pdf.languages import get_language_available
# Template engine for covers
import jinja2
# Side effects
from rst2pdf.directives import aafigure # noqa
from rst2pdf.directives import contents # noqa
from rst2pdf.directives import oddeven # noqa
from rst2pdf.roles import counter as counter_role # noqa
from rst2pdf.roles import package as package_role # noqa
try:
import sphinx as sphinx_module
except ImportError:
sphinx_module = None
# Maps docutils/Sphinx enumerated-list style names (as seen in node
# attributes like 'enumtype') to the upper-case sequencer style names used
# elsewhere in rst2pdf (consumers are outside this chunk).
numberingstyles = {
    'arabic': 'ARABIC',
    'roman': 'ROMAN_UPPER',
    'lowerroman': 'ROMAN_LOWER',
    'alpha': 'LETTERS_UPPER',
    'loweralpha': 'LETTERS_LOWER',
}
class RstToPdf(object):
    def __init__(
        self,
        stylesheets=[],
        language='en_US',
        header=None,
        footer=None,
        inlinelinks=False,
        breaklevel=1,
        font_path=[],
        style_path=[],
        fit_mode='shrink',
        background_fit_mode='center',
        sphinx=False,
        smarty='0',
        baseurl=None,
        repeat_table_rows=False,
        footnote_backlinks=True,
        inline_footnotes=False,
        real_footnotes=False,
        def_dpi=300,
        show_frame=False,
        highlightlang='python',  # this one is only used by Sphinx
        basedir=os.getcwd(),
        splittables=False,
        blank_first_page=False,
        first_page_on_right=False,
        breakside='odd',
        custom_cover='cover.tmpl',
        floating_images=False,
        numbered_links=False,
        section_header_depth=2,
        toc_depth=0,
        raw_html=False,
        strip_elements_with_classes=[],
    ):
        """Configure the converter: stash all options, load stylesheets,
        resolve the document language, and select the node handlers
        (Sphinx-aware or plain docutils)."""
        self.debugLinesPdf = False
        self.depth = 0
        self.breakside = breakside
        self.first_page_on_right = first_page_on_right
        self.blank_first_page = blank_first_page
        self.splittables = splittables
        self.basedir = basedir
        self.language, self.docutils_language = get_language_available(language)[:2]
        self.doc_title = ""
        self.doc_title_clean = ""
        self.doc_subtitle = ""
        self.doc_author = ""
        self.header = header
        self.footer = footer
        self.custom_cover = custom_cover
        self.floating_images = floating_images
        self.decoration = {
            'header': header,
            'footer': footer,
            'endnotes': [],
            'extraflowables': [],
        }
        # find base path
        if hasattr(sys, 'frozen'):
            self.PATH = abspath(dirname(sys.executable))
        else:
            self.PATH = abspath(dirname(__file__))
        self.font_path = font_path
        self.style_path = style_path
        self.def_dpi = def_dpi
        self.loadStyles(stylesheets)
        self.docutils_languages = {}
        self.inlinelinks = inlinelinks
        self.breaklevel = breaklevel
        self.fit_mode = fit_mode
        self.background_fit_mode = background_fit_mode
        self.to_unlink = []
        # Translate the '0'-'3' smarty level into smartypants attribute bits.
        # See https://pythonhosted.org/smartypants/reference.html#smartypants-module
        self.smartypants_attributes = 0
        if smarty == '1':
            self.smartypants_attributes = 1 | 6 | 8 | 64 | 512
        elif smarty == '2':
            self.smartypants_attributes = 1 | 6 | 24 | 64 | 512
        elif smarty == '3':
            self.smartypants_attributes = 1 | 6 | 40 | 64 | 512
        self.baseurl = baseurl
        self.repeat_table_rows = repeat_table_rows
        self.footnote_backlinks = footnote_backlinks
        self.inline_footnotes = inline_footnotes
        self.real_footnotes = real_footnotes
        # Real footnotes are always a two-pass thing.
        if self.real_footnotes:
            self.mustMultiBuild = True
        self.def_dpi = def_dpi
        self.show_frame = show_frame
        self.numbered_links = numbered_links
        self.section_header_depth = section_header_depth
        self.toc_depth = toc_depth
        self.img_dir = os.path.join(self.PATH, 'images')
        self.raw_html = raw_html
        self.strip_elements_with_classes = strip_elements_with_classes
        # Sorry about this, but importing sphinx.roles makes some
        # ordinary documents fail (demo.txt specifically) so
        # I can' t just try to import it outside. I need
        # to do it only if it's requested
        if sphinx and sphinx_module:
            import sphinx.roles
            from rst2pdf.sphinxnodes import sphinxhandlers

            self.highlightlang = highlightlang
            self.gen_pdftext, self.gen_elements = sphinxhandlers(self)
        else:
            # These rst2pdf extensions conflict with sphinx
            directives.register_directive('code-block', code_block.code_block_directive)
            directives.register_directive('code', code_block.code_block_directive)
            self.gen_pdftext, self.gen_elements = nodehandlers(self)
        self.sphinx = sphinx
        # If the stylesheet did not declare any languages, fall back to the
        # document language (or en_US) for the body text.
        if not self.styles.languages:
            self.styles.languages = []
            if self.language:
                self.styles.languages.append(self.language)
                self.styles['bodytext'].language = self.language
            else:
                self.styles.languages.append('en_US')
                self.styles['bodytext'].language = 'en_US'
        # Load the docutils language modules for all required languages
        for lang in self.styles.languages:
            self.docutils_languages[lang] = get_language_available(lang)[2]
        self.pending_targets = []
        self.targets = []
def loadStyles(self, styleSheets=None):
if styleSheets is None:
styleSheets = []
self.styles = sty.StyleSheet(
styleSheets, self.font_path, self.style_path, def_dpi=self.def_dpi
)
def style_language(self, style):
"""Return language corresponding to this style."""
try:
return style.language
except AttributeError:
pass
try:
return self.styles['bodytext'].language
except AttributeError:
return os.environ['LANG'] or 'en'
def text_for_label(self, label, style):
"""Translate text for label."""
try:
text = self.docutils_languages[self.style_language(style)].labels[label]
except KeyError:
text = label.capitalize()
return text
def text_for_bib_field(self, field, style):
"""Translate text for bibliographic fields."""
try:
text = self.docutils_languages[
self.style_language(style)
].bibliographic_fields[field]
except KeyError:
text = field
return text + ":"
def author_separator(self, style):
"""Return separator string for authors."""
try:
sep = self.docutils_languages[self.style_language(style)].author_separators[
0
]
except KeyError:
sep = ';'
return sep + " "
def styleToTags(self, style):
"""Takes a style name, returns a pair of opening/closing tags for it, like
"<font face=helvetica size=14 color=red>". Used for inline
nodes (custom interpreted roles)"""
try:
s = self.styles[style]
r1 = [
'<font face="%s" color="#%s" ' % (s.fontName, s.textColor.hexval()[2:])
]
bc = s.backColor
if bc:
r1.append('backColor="#%s"' % bc.hexval()[2:])
if s.trueFontSize:
r1.append('size="%d"' % s.fontSize)
r1.append('>')
r2 = ['</font>']
if s.strike:
r1.append('<strike>')
r2.insert(0, '</strike>')
if s.underline:
r1.append('<u>')
r2.insert(0, '</u>')
return [''.join(r1), ''.join(r2)]
except KeyError:
log.warning('Unknown class %s', style)
return None
def styleToFont(self, style):
"""Takes a style name, returns a font tag for it, like
"<font face=helvetica size=14 color=red>". Used for inline
nodes (custom interpreted roles)"""
try:
s = self.styles[style]
r = [
'<font face="%s" color="#%s" ' % (s.fontName, s.textColor.hexval()[2:])
]
bc = s.backColor
if bc:
r.append('backColor="#%s"' % bc.hexval()[2:])
if s.trueFontSize:
r.append('size="%d"' % s.fontSize)
r.append('>')
return ''.join(r)
except KeyError:
log.warning('Unknown class %s', style)
return None
def gather_pdftext(self, node, replaceEnt=True):
return ''.join([self.gen_pdftext(n, replaceEnt) for n in node.children])
def gather_elements(self, node, style=None):
if style is None:
style = self.styles.styleForNode(node)
r = []
if 'float' in style.__dict__:
style = None # Don't pass floating styles to children!
for n in node.children:
r.extend(self.gen_elements(n, style=style))
return r
    def bullet_for_node(self, node):
        """Takes a node, assumes it's some sort of
        item whose parent is a list, and
        returns the bullet text it should have.

        Returns a (bullet_text, kind) tuple where kind is 'bullet' for
        bulleted lists and 'item' for enumerated ones.
        NOTE(review): the alpha styles index a 26-character string directly,
        so lists longer than 26 items would raise IndexError — confirm
        whether docutils can produce such input.
        """
        b = ""
        t = 'item'
        # An explicit 'start' attribute offsets the numbering.
        if node.parent.get('start'):
            start = int(node.parent.get('start'))
        else:
            start = 1
        if node.parent.get('bullet') or isinstance(
            node.parent, docutils.nodes.bullet_list
        ):
            b = node.parent.get('bullet', '*')
            # The literal string "None" requests an empty bullet.
            if b == "None":
                b = ""
            t = 'bullet'
        elif node.parent.get('enumtype') == 'arabic':
            b = str(node.parent.children.index(node) + start) + '.'
        elif node.parent.get('enumtype') == 'lowerroman':
            b = toRoman(node.parent.children.index(node) + start).lower() + '.'
        elif node.parent.get('enumtype') == 'upperroman':
            b = toRoman(node.parent.children.index(node) + start).upper() + '.'
        elif node.parent.get('enumtype') == 'loweralpha':
            b = (
                'abcdefghijklmnopqrstuvwxyz'[
                    node.parent.children.index(node) + start - 1
                ]
                + '.'
            )
        elif node.parent.get('enumtype') == 'upperalpha':
            b = (
                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[
                    node.parent.children.index(node) + start - 1
                ]
                + '.'
            )
        else:
            log.critical("Unknown kind of list_item %s [%s]", node.parent, nodeid(node))
        return b, t
def filltable(self, rows):
"""
Takes a list of rows, consisting of cells and performs the following fixes:
* For multicolumn cells, add continuation cells, to make all rows the same
size. These cells have to be multirow if the original cell is multirow.
* For multirow cell, insert continuation cells, to make all columns the
same size.
* If there are still shorter rows, add empty cells at the end (ReST quirk)
* Once the table is *normalized*, create spans list, fitting for reportlab's
Table class.
"""
# If there is a multicol cell, we need to insert Continuation Cells
# to make all rows the same length
for y in range(0, len(rows)):
for x in range(len(rows[y]) - 1, -1, -1):
cell = rows[y][x]
if isinstance(cell, str):
continue
if cell.get("morecols"):
for i in range(0, cell.get("morecols")):
e = docutils.nodes.entry("")
e["morerows"] = cell.get("morerows", 0)
rows[y].insert(x + 1, e)
for y in range(0, len(rows)):
for x in range(0, len(rows[y])):
cell = rows[y][x]
if isinstance(cell, str):
continue
if cell.get("morerows"):
for i in range(0, cell.get("morerows")):
rows[y + i + 1].insert(x, "")
# If a row is shorter, add empty cells at the right end
maxw = max([len(r) for r in rows])
for r in rows:
while len(r) < maxw:
r.append("")
# Create spans list for reportlab's table style
spans = []
for y in range(0, len(rows)):
for x in range(0, len(rows[y])):
cell = rows[y][x]
if isinstance(cell, str):
continue
if cell.get("morecols"):
mc = cell.get("morecols")
else:
mc = 0
if cell.get("morerows"):
mr = cell.get("morerows")
else:
mr = 0
if mc or mr:
spans.append(('SPAN', (x, y), (x + mc, y + mr)))
return spans
def PreformattedFit(self, text, style):
"""Preformatted section that gets horizontally compressed if needed."""
# Pass a ridiculous size, then it will shrink to what's available
# in the frame
return BoundByWidth(
2000 * cm,
content=[XXPreformatted(text, style)],
mode=self.fit_mode,
style=style,
)
def createPdf(
self,
text=None,
source_path=None,
output=None,
doctree=None,
compressed=False,
# This adds entries to the PDF TOC
# matching the rst source lines
debugLinesPdf=False,
):
"""Create a PDF from text (ReST input),
or doctree (docutil nodes) and save it in outfile.
If outfile is a string, it's a filename.
If it's something with a write method, (like a StringIO,
or a file object), the data is saved there.
"""
self.decoration = {
'header': self.header,
'footer': self.footer,
'endnotes': [],
'extraflowables': [],
}
self.pending_targets = []
self.targets = []
self.debugLinesPdf = debugLinesPdf
if doctree is None:
if text is not None:
if self.language:
settings_overrides = {'language_code': self.docutils_language}
else:
settings_overrides = {}
settings_overrides[
'strip_elements_with_classes'
] = self.strip_elements_with_classes
settings_overrides['exit_status_level'] = 3
try:
self.doctree = docutils.core.publish_doctree(
text,
source_path=source_path,
settings_overrides=settings_overrides,
)
log.debug(self.doctree)
except Exception as e:
if log.isEnabledFor(logging.INFO):
# Log exception with traceback if more detailed logging has been set
log.exception('Error generating doctree')
else:
log.error(f"Error generating doctree: {e}")
log.error("Cannot generate PDF, exiting")
return 1
else:
log.error('Error: createPdf needs a text or a doctree')
return 1
else:
self.doctree = doctree
if self.numbered_links:
# Transform all links to sections so they show numbers
from .sectnumlinks import SectNumFolder, SectRefExpander
snf = SectNumFolder(self.doctree)
self.doctree.walk(snf)
srf = SectRefExpander(self.doctree, snf.sectnums)
self.doctree.walk(srf)
if self.strip_elements_with_classes:
from docutils.transforms.universal import StripClassesAndElements
sce = StripClassesAndElements(self.doctree)
sce.apply()
if self.toc_depth == 0:
# use the `:depth:` option from `.. contents::`
self.toc_depth = contents.Contents.depth
try:
elements = self.gen_elements(self.doctree)
except Exception as e:
if log.isEnabledFor(logging.INFO):
# Log exception with traceback if more detailed logging has been set
log.exception('Error generating document elements')
else:
log.error(f"Error generating document elements: {e}")
log.error("Cannot generate PDF, exiting")
return 1
# Find cover template, save it in cover_file
jinja_env = jinja2.Environment(
loader=jinja2.FileSystemLoader(
[
self.basedir,
os.path.expanduser('~/.rst2pdf'),
os.path.join(self.PATH, 'templates'),
]
),
autoescape=jinja2.select_autoescape(['html', 'xml']),
)
try:
template = jinja_env.get_template(self.custom_cover)
except jinja2.TemplateNotFound:
log.error("Can't find cover template %s, using default" % self.custom_cover)
template = jinja_env.get_template('cover.tmpl')
# Feed data to the template, get restructured text.
cover_text = template.render(title=self.doc_title, subtitle=self.doc_subtitle)
# This crashes sphinx because .. class:: in sphinx is
# something else. Ergo, pdfbuilder does it in its own way.
if not self.sphinx:
elements = (
self.gen_elements(
publish_secondary_doctree(cover_text, self.doctree, source_path)
)
+ elements
)
if self.blank_first_page:
elements.insert(0, PageBreak())
# Put the endnotes at the end ;-)
endnotes = self.decoration['endnotes']
if endnotes:
elements.append(MySpacer(1, 2 * cm))
elements.append(Separation())
for n in self.decoration['endnotes']:
t_style = TableStyle(self.styles['endnote'].commands)
colWidths = self.styles['endnote'].colWidths
elements.append(
DelayedTable([[n[0], n[1]]], style=t_style, colWidths=colWidths)
)
if self.floating_images:
# Handle images with alignment more like in HTML
new_elem = []
for i, e in enumerate(elements[::-1]):
if isinstance(e, MyImage) and e.image.hAlign != 'CENTER' and new_elem:
# This is an image where flowables should wrap
# around it
popped = new_elem.pop()
new_elem.append(
ImageAndFlowables(e, popped, imageSide=e.image.hAlign.lower())
)
else:
new_elem.append(e)
elements = new_elem
elements.reverse()
head = self.decoration['header']
foot = self.decoration['footer']
# So, now, create the FancyPage with the right sizes and elements
FP = FancyPage("fancypage", head, foot, self)
def cleantags(s):
re.sub(r'<[^>]*?>', '', str(s).strip())
pdfdoc = FancyDocTemplate(
output,
pageTemplates=[FP],
showBoundary=0,
pagesize=self.styles.ps,
title=self.doc_title_clean,
author=self.doc_author,
pageCompression=compressed,
)
pdfdoc.client = self
# Handle totally empty documents (Issue #547)
if not elements:
elements.append(Paragraph("", style=self.styles['base']))
if getattr(self, 'mustMultiBuild', False):
# Force a multibuild pass
if not isinstance(elements[-1], UnhappyOnce):
log.info('Forcing second pass so Total pages work')
elements.append(UnhappyOnce())
while True:
try:
log.info("Starting build")
self.elements = elements
# See if this *must* be multipass
pdfdoc.multiBuild(elements)
# Force a multibuild pass
# FIXME: since mustMultiBuild is set by the
# first pass in the case of ###Total###, then we
# make a new forced two-pass build. This is broken.
# conceptually.
if getattr(self, 'mustMultiBuild', False):
# Force a multibuild pass
if not isinstance(elements[-1], UnhappyOnce):
log.info('Forcing second pass so Total pages work')
elements.append(UnhappyOnce())
continue
# Rearrange footnotes if needed
if self.real_footnotes:
newStory = []
fnPile = []
for e in elements:
if getattr(e, 'isFootnote', False):
# Add it to the pile
# if not isinstance (e, MySpacer):
fnPile.append(e)
elif getattr(e, '_atTop', False) or isinstance(
e, (UnhappyOnce, MyPageBreak)
):
if fnPile:
fnPile.insert(0, Separation())
newStory.append(Sinker(fnPile))
newStory.append(e)
fnPile = []
else:
newStory.append(e)
elements = newStory + fnPile
for e in elements:
if hasattr(e, '_postponed'):
delattr(e, '_postponed')
self.real_footnotes = False
continue
break
except ValueError:
# FIXME: cross-document links come through here, which means
# an extra pass per cross-document reference. Which sucks.
# if v.args and str(v.args[0]).startswith('format not resolved'):
# missing=str(v.args[0]).split(' ')[-1]
# log.error('Adding missing reference to %s and rebuilding. This is slow!'%missing)
# elements.append(Reference(missing))
# for e in elements:
# if hasattr(e,'_postponed'):
# delattr(e,'_postponed')
# else:
# raise
raise
# doc = SimpleDocTemplate("phello.pdf")
# doc.build(elements)
for fn in self.to_unlink:
try:
os.unlink(fn)
except OSError:
pass
return 0
class FancyDocTemplate(BaseDocTemplate):
    """BaseDocTemplate subclass that reports progress, emits TOC entries for
    Heading flowables, and reimplements handle_flowable (copied from
    reportlab) so generated content and split flowables are handled."""

    def onProgress(self, typ, value):
        # Progress callback registered in afterInit; only logs.
        global _counter
        message = ''
        if typ == 'SIZE_EST':
            log.debug(f'Number of flowables: {value}')
        elif typ == 'PROGRESS':
            message = f'Flowable {value}'
            # add class name for this flowable if we can
            if hasattr(self.client, 'elements'):
                if 0 <= value < len(self.client.elements):
                    element = self.client.elements[value]
                    message += f" {type(element)}"
            log.debug(f'Page {_counter}: {message}')

    def afterInit(self):
        self.setProgressCallBack(self.onProgress)

    def afterFlowable(self, flowable):
        if isinstance(flowable, Heading):
            # Notify TOC entry for headings/abstracts/dedications.
            level, text = flowable.level, flowable.text
            parent_id = flowable.parent_id
            node = flowable.node
            pagenum = setPageCounter()
            self.notify('TOCEntry', (level, text, pagenum, parent_id, node))

    def handle_flowable(self, flowables):
        '''try to handle one flowable from the front of list flowables.'''
        # this method is copied from reportlab
        # allow document a chance to look at, modify or ignore
        # the object(s) about to be processed
        self.filterFlowables(flowables)
        self.handle_breakBefore(flowables)
        self.handle_keepWithNext(flowables)
        f = flowables[0]
        del flowables[0]
        if f is None:
            return
        if isinstance(f, PageBreak):
            if isinstance(f, SlowPageBreak):
                self.handle_pageBreak(slow=1)
            else:
                self.handle_pageBreak()
            self.afterFlowable(f)
        elif isinstance(f, ActionFlowable):
            f.apply(self)
            self.afterFlowable(f)
        else:
            frame = self.frame
            canv = self.canv
            # try to fit it then draw it
            if frame.add(f, canv, trySplit=self.allowSplitting):
                if not isinstance(f, FrameActionFlowable):
                    self._curPageFlowableCount += 1
                    self.afterFlowable(f)
                doctemplate._addGeneratedContent(flowables, frame)
            else:
                if self.allowSplitting:
                    # see if this is a splittable thing
                    S = frame.split(f, canv)
                    n = len(S)
                else:
                    n = 0
                if n:
                    # The flowable split; place the first piece now and push
                    # the remainder back onto the work list.
                    if not isinstance(S[0], (PageBreak, SlowPageBreak, ActionFlowable)):
                        if frame.add(S[0], canv, trySplit=0):
                            self._curPageFlowableCount += 1
                            self.afterFlowable(S[0])
                            doctemplate._addGeneratedContent(flowables, frame)
                        else:
                            ident = "Splitting error(n==%d) on page %d in\n%s" % (
                                n,
                                self.page,
                                self._fIdent(f, 60, frame),
                            )
                            # leave to keep apart from the raise
                            raise LayoutError(ident)
                    del S[0]
                    for i, f in enumerate(S):
                        flowables.insert(i, f)  # put split flowables back on the list
                else:
                    # Could not split; postpone to the next frame, but give up
                    # after too many attempts to avoid an infinite loop.
                    if hasattr(f, '_postponed') and f._postponed > 4:
                        ident = (
                            "Flowable %s%s too large on page %d in frame %r%s of template %r"
                            % (
                                self._fIdent(f, 60, frame),
                                doctemplate._fSizeString(f),
                                self.page,
                                self.frame.id,
                                self.frame._aSpaceString(),
                                self.pageTemplate.id,
                            )
                        )
                        # leave to keep apart from the raise
                        raise LayoutError(ident)
                    # this ought to be cleared when they are finally drawn!
                    f._postponed = 1
                    mbe = getattr(self, '_multiBuildEdits', None)
                    if mbe:
                        mbe((delattr, f, '_postponed'))
                    flowables.insert(0, f)  # put the flowable back
                    self.handle_frameEnd()
# Module-level page-counter state: the current logical page number and the
# numbering style used to format it (shared by PageCounter, setPageCounter
# and the header/footer token replacement).
_counter = 0
_counterStyle = 'arabic'
class PageCounter(Flowable):
    """Zero-visual flowable that resets the global page counter and style.

    The change takes effect at layout (wrap) time; drawing is a no-op.
    """

    def __init__(self, number=0, style='arabic'):
        self.number = int(number)
        self.style = str(style).lower()
        Flowable.__init__(self)

    def wrap(self, availWidth, availHeight):
        global _counter, _counterStyle
        _counterStyle = self.style
        _counter = self.number
        return (self.width, self.height)

    def drawOn(self, canvas, x, y, _sW):
        # Nothing to draw; this flowable only mutates global counter state.
        pass


# Expose the counter flowable where the directive code expects to find it.
flowables.PageCounter = PageCounter
def setPageCounter(counter=None, style=None):
    """Optionally update the global page counter/style, then return the
    current page number formatted in the current style."""
    global _counter, _counterStyle
    if counter is not None:
        _counter = counter
    if style is not None:
        _counterStyle = style
    formatters = {
        'lowerroman': lambda n: toRoman(n).lower(),
        'roman': lambda n: toRoman(n).upper(),
        'alpha': lambda n: 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[n % 26],
        'loweralpha': lambda n: 'abcdefghijklmnopqrstuvwxyz'[n % 26],
    }
    # Anything else (notably 'arabic') falls back to plain decimal digits.
    formatter = formatters.get(_counterStyle, str)
    return formatter(_counter)
# A flowable that just holds other flowables; HeaderOrFooter.draw fills its
# _content and draws it at a fixed position on the canvas.
class MyContainer(_Container, Flowable):
    pass
class UnhappyOnce(IndexingFlowable):
    """An indexing flowable that reports itself unsatisfied exactly once.

    Appending one to a story forces multiBuild to run at least two passes,
    which is what makes things like ###Total### resolvable.
    """

    _unhappy = True

    def isSatisfied(self):
        if not self._unhappy:
            return True
        # First query: flip the flag (as an instance attribute) and claim
        # dissatisfaction so another build pass is scheduled.
        self._unhappy = False
        return False

    def draw(self):
        pass
class HeaderOrFooter(object):
    """A helper object for FancyPage (below)

    HeaderOrFooter handles operations which are common
    to both headers and footers
    """

    def __init__(self, items=None, isfooter=False, client=None):
        self.items = items
        # Names of the page-template keys this instance consults, chosen by
        # whether it represents a header or a footer.
        if isfooter:
            locinfo = 'footer showFooter defaultFooter footerSeparator'
        else:
            locinfo = 'header showHeader defaultHeader headerSeparator'
        self.isfooter = isfooter
        self.loc, self.showloc, self.defaultloc, self.addsep = locinfo.split()
        self.totalpages = 0
        self.client = client

    def prepare(self, pageobj, canv, doc):
        """Build the list of flowables for this header/footer and return its
        height (0 when hidden or empty).  Stores the result in self.prepared."""
        showloc = pageobj.template.get(self.showloc, True)
        height = 0
        items = self.items
        if showloc:
            if not items:
                # No explicit content: fall back to the template's default,
                # which is reST source and must be converted to flowables.
                items = pageobj.template.get(self.defaultloc)
                if items:
                    items = self.client.gen_elements(
                        publish_secondary_doctree(items, self.client.doctree, None)
                    )
            if items:
                if isinstance(items, list):
                    # Copy so the separator below never mutates the original.
                    items = items[:]
                else:
                    items = [Paragraph(items, pageobj.styles[self.loc])]
                addsep = pageobj.template.get(self.addsep, False)
                if addsep:
                    if self.isfooter:
                        items.insert(0, Separation())
                    else:
                        items.append(Separation())
                _, height = _listWrapOn(items, pageobj.tw, canv)
        self.prepared = height and items
        return height

    def replaceTokens(self, elems, canv, doc, smarty):
        """Put doc_title/page number/etc in text of header/footer."""
        # Make sure page counter is up to date
        pnum = setPageCounter()

        def replace(text):
            # Ensure text is unicode
            if isinstance(text, bytes):
                try:
                    # NOTE(review): `e` here is the loop variable of the
                    # enumerate() loop below (late binding); Paragraphs have
                    # no obvious `encoding` attribute, so in practice this
                    # falls through to the utf-8 branch — confirm.
                    text = text.decode(e.encoding)
                except (AttributeError, TypeError):
                    text = text.decode('utf-8')
            text = text.replace(u'###Page###', pnum)
            if '###Total###' in text:
                text = text.replace(u'###Total###', str(self.totalpages))
                # Total pages is only known after a full pass, so ask the
                # client for a second build.
                self.client.mustMultiBuild = True
            text = text.replace(u"###Title###", doc.title)
            text = text.replace(u"###Section###", getattr(canv, 'sectName', ''))
            text = text.replace(u"###SectNum###", getattr(canv, 'sectNum', ''))
            text = smartypants(text, smarty)
            return text

        for i, e in enumerate(elems):
            # TODO: implement a search/replace for arbitrary things
            if isinstance(e, Paragraph):
                text = replace(e.text)
                elems[i] = Paragraph(text, e.style)
            elif isinstance(e, DelayedTable):
                # Work on a deep copy so tokens are re-resolved on each page.
                data = deepcopy(e.data)
                for r, row in enumerate(data):
                    for c, cell in enumerate(row):
                        if isinstance(cell, list):
                            data[r][c] = self.replaceTokens(cell, canv, doc, smarty)
                        else:
                            row[c] = self.replaceTokens([cell], canv, doc, smarty)[0]
                elems[i] = DelayedTable(data, e._colWidths, e.style)
            elif isinstance(e, BoundByWidth):
                for index, item in enumerate(e.content):
                    if isinstance(item, Paragraph):
                        e.content[index] = Paragraph(replace(item.text), item.style)
                elems[i] = e
            elif isinstance(e, OddEven):
                odd = self.replaceTokens([e.odd], canv, doc, smarty)[0]
                even = self.replaceTokens([e.even], canv, doc, smarty)[0]
                elems[i] = OddEven(odd, even)
        return elems

    def draw(self, pageobj, canv, doc, x, y, width, height):
        """Render the prepared content (if any) at the given position."""
        self.totalpages = max(self.totalpages, doc.page)
        items = self.prepared
        if items:
            self.replaceTokens(items, canv, doc, pageobj.smartypants_attributes)
            container = MyContainer()
            container._content = items
            container.width = width
            container.height = height
            container.drawOn(canv, x, y)
class FancyPage(PageTemplate):
    """A page template that handles changing layouts."""

    def __init__(self, _id, _head, _foot, client):
        self.client = client
        self.styles = client.styles
        self._head = HeaderOrFooter(_head, client=client)
        self._foot = HeaderOrFooter(_foot, True, client)
        self.smartypants_attributes = client.smartypants_attributes
        self.show_frame = client.show_frame
        # Maps "<uri><fit_mode>" -> (MyImage, x, y) so backgrounds are only
        # sized once per (image, mode) combination.
        self.image_cache = {}
        PageTemplate.__init__(self, _id, [])

    def draw_background(self, which, canv):
        """Draws a background and/or foreground image
        on each page which uses the template.

        Calculates the image one time, and caches
        it for reuse on every page in the template.

        How the background is drawn depends on the
        --fit-background-mode option.

        If desired, we could add code to push it around
        on the page, using stylesheets to align and/or
        set the offset.
        """
        uri = self.template[which]
        background_fit_mode = self.client.background_fit_mode
        if 'background_fit_mode' in self.template:
            # Per-template override of the global fit mode.
            background_fit_mode = self.template['background_fit_mode']

        cache_key = f"{uri}{background_fit_mode}"
        info = self.image_cache.get(cache_key)
        if info is None:
            fname, _, _ = MyImage.split_uri(uri)
            if not os.path.exists(fname):
                # Drop the broken reference so we only warn once.
                del self.template[which]
                log.error("Missing %s image file: %s", which, uri)
                return
            try:
                w, h, _ = MyImage.size_for_node(
                    dict(
                        uri=uri,
                    ),
                    self.client,
                )
            except ValueError:
                # Broken image, return arbitrary stuff
                uri = missing
                w, h, = (
                    100,
                    100,
                )
            pw, ph = self.styles.pw, self.styles.ph
            if background_fit_mode == 'center':
                # Preserve aspect ratio, never upscale, center on the page.
                scale = min(1.0, 1.0 * pw / w, 1.0 * ph / h)
                sw, sh = w * scale, h * scale
                x, y = (pw - sw) / 2.0, (ph - sh) / 2.0
            elif background_fit_mode == 'scale':
                # Stretch to fill the whole page (aspect ratio not kept).
                x, y = 0, 0
                sw, sh = pw, ph
            elif background_fit_mode == 'scale_width':
                # Stretch horizontally only; keep the image's own height.
                x, y = 0, 0
                sw, sh = pw, h
            else:
                log.error('Unknown background fit mode: %s' % background_fit_mode)
                # Do scale anyway
                x, y = 0, 0
                sw, sh = pw, ph

            bg = MyImage(uri, sw, sh, client=self.client)
            # BUG FIX: the cache was previously stored under the bare ``uri``
            # while being looked up under ``uri + fit_mode``, so it never hit
            # and the image was re-sized on every page.  Store under the same
            # composite key used for the lookup above.
            self.image_cache[cache_key] = info = bg, x, y

        bg, x, y = info
        bg.drawOn(canv, x, y)

    def is_left(self, page_num):
        """Default behavior is that the first page is on the left.

        If the user has --first_page_on_right, the calculation is reversed.
        """
        val = page_num % 2 == 1
        if self.client.first_page_on_right:
            val = not val
        return val

    def beforeDrawPage(self, canv, doc):
        """Do adjustments to the page according to where we are in the document.

        * Gutter margins on left or right as needed
        """
        global _counter, _counterStyle

        styles = self.styles
        self.tw = styles.pw - styles.lm - styles.rm - styles.gm
        # What page template to use?
        tname = canv.__dict__.get('templateName', self.styles.firstTemplate)
        if tname not in self.styles.pageTemplates:
            log.error(f"Template '{tname}' is not defined")
            sys.exit(1)
        self.template = self.styles.pageTemplates[tname]
        canv.templateName = tname

        # Any background with fit mode?
        background = canv.__dict__.get('background', None)
        if background:
            # Copy before mutating: the template dict is shared.
            self.template = self.template.copy()
            self.template['background'] = background
        background_fit_mode = canv.__dict__.get('background_fit_mode', None)
        if background_fit_mode:
            self.template['background_fit_mode'] = background_fit_mode

        doct = getattr(canv, '_doctemplate', None)
        canv._doctemplate = None  # to make _listWrapOn work
        if doc.page == 1:
            _counter = 0
            _counterStyle = 'arabic'
        _counter += 1

        # Adjust text space accounting for header/footer
        self.hh = self._head.prepare(self, canv, doc)
        self.fh = self._foot.prepare(self, canv, doc)
        canv._doctemplate = doct

        self.hx = styles.lm
        self.hy = styles.ph - styles.tm - self.hh

        self.fx = styles.lm
        self.fy = styles.bm
        self.th = (
            styles.ph
            - styles.tm
            - styles.bm
            - self.hh
            - self.fh
            - styles.ts
            - styles.bs
        )

        # Adjust gutter margins
        if self.is_left(doc.page):  # Left page
            x1 = styles.lm
        else:  # Right page
            x1 = styles.lm + styles.gm
        y1 = styles.bm + self.fh + styles.bs

        # If there is a background parameter for this page Template, draw it
        if 'background' in self.template:
            self.draw_background('background', canv)

        self.frames = []
        if 'frames' not in self.template:
            log.error('No frames in template')
            sys.exit(1)
        for frame in self.template['frames']:
            frame = frame[:]
            while len(frame) < 8:
                # This is the default in SmartFrame. At some point in the future we
                # may want to change this to 0.
                frame.append(6)
            self.frames.append(
                SmartFrame(
                    self,
                    styles.adjustUnits(frame[0], self.tw) + x1,
                    styles.adjustUnits(frame[1], self.th) + y1,
                    styles.adjustUnits(frame[2], self.tw),
                    styles.adjustUnits(frame[3], self.th),
                    leftPadding=styles.adjustUnits(frame[4], self.tw),
                    bottomPadding=styles.adjustUnits(frame[5], self.th),
                    rightPadding=styles.adjustUnits(frame[6], self.tw),
                    topPadding=styles.adjustUnits(frame[7], self.th),
                    showBoundary=self.show_frame,
                )
            )
        canv.firstSect = True
        canv._pagenum = doc.page
        for frame in self.frames:
            frame._pagenum = doc.page

    def afterDrawPage(self, canv, doc):
        """Draw header/footer."""
        # Adjust for gutter margin
        canv.addPageLabel(
            canv._pageNumber - 1, numberingstyles[_counterStyle], _counter
        )
        log.info('Page %s [%s]' % (_counter, doc.page))
        if self.is_left(doc.page):  # Left page
            hx = self.hx
            fx = self.fx
        else:  # Right Page
            hx = self.hx + self.styles.gm
            fx = self.fx + self.styles.gm
        self._head.draw(self, canv, doc, hx, self.hy, self.tw, self.hh)
        self._foot.draw(self, canv, doc, fx, self.fy, self.tw, self.fh)

        # If there is a foreground parameter for this page Template, draw it
        if 'foreground' in self.template:
            self.draw_background('foreground', canv)
def parse_commandline():
    """Build and return the OptionParser for rst2pdf's command line.

    Defaults are read from the ``config`` module, so this function must be
    re-run after a custom ``--config`` file has been parsed (see ``main``).
    """
    parser = OptionParser()
    parser.add_option(
        '--config',
        dest='configfile',
        metavar='FILE',
        help='Config file to use. Default=~/.rst2pdf/config',
    )
    parser.add_option(
        '-o', '--output', dest='output', metavar='FILE', help='Write the PDF to FILE'
    )

    def_ssheets = ','.join(
        [
            expanduser(p)
            for p in config.getValue("general", "stylesheets", "").split(',')
        ]
    )
    parser.add_option(
        '-s',
        '--stylesheets',
        dest='style',
        type='string',
        action='append',
        metavar='STYLESHEETS',
        default=[def_ssheets],
        help='A comma-separated list of custom stylesheets. Default="%s"' % def_ssheets,
    )

    def_sheetpath = os.pathsep.join(
        [
            expanduser(p)
            for p in config.getValue("general", "stylesheet_path", "").split(os.pathsep)
        ]
    )
    parser.add_option(
        '--stylesheet-path',
        dest='stylepath',
        metavar='FOLDER%sFOLDER%s...%sFOLDER' % ((os.pathsep,) * 3),
        default=def_sheetpath,
        help='A list of folders to search for stylesheets,'
        ' separated using "%s". Default="%s"' % (os.pathsep, def_sheetpath),
    )

    def_compressed = config.getValue("general", "compressed", False)
    parser.add_option(
        '-c',
        '--compressed',
        dest='compressed',
        action="store_true",
        default=def_compressed,
        help='Create a compressed PDF. Default=%s' % def_compressed,
    )

    parser.add_option(
        '--print-stylesheet',
        dest='printssheet',
        action="store_true",
        default=False,
        help='Print the default stylesheet and exit',
    )

    parser.add_option(
        '--font-folder',
        dest='ffolder',
        metavar='FOLDER',
        help='Search this folder for fonts. (Deprecated)',
    )

    def_fontpath = os.pathsep.join(
        [
            expanduser(p)
            for p in config.getValue("general", "font_path", "").split(os.pathsep)
        ]
    )
    parser.add_option(
        '--font-path',
        dest='fpath',
        metavar='FOLDER%sFOLDER%s...%sFOLDER' % ((os.pathsep,) * 3),
        default=def_fontpath,
        help='A list of folders to search for fonts, separated using "%s".'
        ' Default="%s"' % (os.pathsep, def_fontpath),
    )

    def_baseurl = urlunparse(['file', os.getcwd() + os.sep, '', '', '', ''])
    parser.add_option(
        '--baseurl',
        dest='baseurl',
        metavar='URL',
        default=def_baseurl,
        help='The base URL for relative URLs. Default="%s"' % def_baseurl,
    )

    def_lang = config.getValue("general", "language", 'en_US')
    parser.add_option(
        '-l',
        '--language',
        metavar='LANG',
        default=def_lang,
        dest='language',
        help='Language to be used for hyphenation'
        ' and docutils localizations. Default="%s"' % def_lang,
    )

    def_header = config.getValue("general", "header")
    parser.add_option(
        '--header',
        metavar='HEADER',
        default=def_header,
        dest='header',
        help='Page header if not specified in the document.'
        ' Default="%s"' % def_header,
    )

    def_footer = config.getValue("general", "footer")
    parser.add_option(
        '--footer',
        metavar='FOOTER',
        default=def_footer,
        dest='footer',
        help='Page footer if not specified in the document.'
        ' Default="%s"' % def_footer,
    )

    def_section_header_depth = config.getValue("general", "section_header_depth", 2)
    parser.add_option(
        '--section-header-depth',
        metavar='N',
        default=def_section_header_depth,
        dest='section_header_depth',
        help='''Sections up to this depth will be used in the header and footer's replacement of ###Section###. Default=%s'''
        % def_section_header_depth,
    )

    def_smartquotes = config.getValue("general", "smartquotes", "0")
    parser.add_option(
        "--smart-quotes",
        metavar="VALUE",
        default=def_smartquotes,
        dest="smarty",
        help='Try to convert ASCII quotes, ellipses and dashes'
        ' to the typographically correct equivalent. For details,'
        ' read the man page or the manual. Default="%s"' % def_smartquotes,
    )

    def_fit = config.getValue("general", "fit_mode", "shrink")
    parser.add_option(
        '--fit-literal-mode',
        metavar='MODE',
        default=def_fit,
        dest='fit_mode',
        help='What to do when a literal is too wide. One of error,'
        ' overflow,shrink,truncate. Default="%s"' % def_fit,
    )

    def_fit_background = config.getValue("general", "background_fit_mode", "center")
    parser.add_option(
        '--fit-background-mode',
        metavar='MODE',
        default=def_fit_background,
        dest='background_fit_mode',
        help='How to fit the background image to the page.'
        ' One of scale, scale_width or center. Default="%s"' % def_fit_background,
    )

    parser.add_option(
        '--inline-links',
        action="store_true",
        dest='inlinelinks',
        default=False,
        help='Shows target between parentheses instead of active link.',
    )

    parser.add_option(
        '--repeat-table-rows',
        action="store_true",
        dest='repeattablerows',
        default=False,
        help='Repeats header row for each split table.',
    )

    def_raw_html = config.getValue("general", "raw_html", False)
    parser.add_option(
        '--raw-html',
        action="store_true",
        dest='raw_html',
        default=def_raw_html,
        help='Support embeddig raw HTML. Default=%s' % def_raw_html,
    )

    parser.add_option(
        '-q',
        '--quiet',
        action="store_true",
        dest='quiet',
        default=False,
        help='Print less information.',
    )

    parser.add_option(
        '-v',
        '--verbose',
        action="store_true",
        dest='verbose',
        default=False,
        help='Print debug information.',
    )

    parser.add_option(
        '--very-verbose',
        action="store_true",
        dest='vverbose',
        default=False,
        help='Print even more debug information.',
    )

    parser.add_option(
        '--version',
        action="store_true",
        dest='version',
        default=False,
        help='Print version number and exit.',
    )

    def_footnote_backlinks = config.getValue("general", "footnote_backlinks", True)
    parser.add_option(
        '--no-footnote-backlinks',
        action='store_false',
        dest='footnote_backlinks',
        default=def_footnote_backlinks,
        help='Disable footnote backlinks.'
        ' Default=%s' % str(not def_footnote_backlinks),
    )

    def_inline_footnotes = config.getValue("general", "inline_footnotes", False)
    parser.add_option(
        '--inline-footnotes',
        action='store_true',
        dest='inline_footnotes',
        default=def_inline_footnotes,
        help='Show footnotes inline.' ' Default=%s' % str(not def_inline_footnotes),
    )

    def_real_footnotes = config.getValue("general", "real_footnotes", False)
    parser.add_option(
        '--real-footnotes',
        action='store_true',
        dest='real_footnotes',
        default=def_real_footnotes,
        help='Show footnotes at the bottom of the page where they are defined.'
        ' Default=%s' % str(def_real_footnotes),
    )

    def_dpi = config.getValue("general", "default_dpi", 300)
    parser.add_option(
        '--default-dpi',
        dest='def_dpi',
        metavar='NUMBER',
        default=def_dpi,
        help='DPI for objects sized in pixels. Default=%d' % def_dpi,
    )

    parser.add_option(
        '--show-frame-boundary',
        dest='show_frame',
        action='store_true',
        default=False,
        help='Show frame borders (only useful for debugging). Default=False',
    )

    parser.add_option(
        '--disable-splittables',
        dest='splittables',
        action='store_false',
        default=True,
        help="Don't use splittable flowables in some elements."
        " Only try this if you can't process a document any other way.",
    )

    def_break = config.getValue("general", "break_level", 0)
    parser.add_option(
        '-b',
        '--break-level',
        dest='breaklevel',
        metavar='LEVEL',
        default=def_break,
        help='Maximum section level that starts in a new page.'
        ' Default: %d' % def_break,
    )

    def_blankfirst = config.getValue("general", "blank_first_page", False)
    parser.add_option(
        '--blank-first-page',
        dest='blank_first_page',
        action='store_true',
        default=def_blankfirst,
        help='Add a blank page at the beginning of the document.',
    )

    def_first_page_on_right = config.getValue("general", "first_page_on_right", False)
    parser.add_option(
        '--first-page-on-right',
        dest='first_page_on_right',
        action='store_true',
        default=def_first_page_on_right,
        help='Two-sided book style (where first page starts on the right side)',
    )

    def_breakside = config.getValue("general", "break_side", 'any')
    parser.add_option(
        '--break-side',
        dest='breakside',
        metavar='VALUE',
        default=def_breakside,
        help='How section breaks work. Can be "even", and sections start'
        ' in an even page, "odd", and sections start in odd pages,'
        ' or "any" and sections start in the next page, be it even or odd.'
        ' See also the -b option.',
    )

    parser.add_option(
        '--date-invariant',
        dest='invariant',
        action='store_true',
        default=False,
        help="Don't store the current date in the PDF."
        " Useful mainly for the test suite,"
        " where we don't want the PDFs to change.",
    )

    parser.add_option(
        '-e',
        '--extension-module',
        dest='extensions',
        action="append",
        type="string",
        default=[],
        help="Add a helper extension module to this invocation of rst2pdf "
        "(module must end in .py and be on the python path)",
    )

    def_cover = config.getValue("general", "custom_cover", 'cover.tmpl')
    parser.add_option(
        '--custom-cover',
        dest='custom_cover',
        metavar='FILE',
        default=def_cover,
        help='Template file used for the cover page. Default: %s' % def_cover,
    )

    def_floating_images = config.getValue("general", "floating_images", False)
    parser.add_option(
        '--use-floating-images',
        action='store_true',
        default=def_floating_images,
        help='Makes images with :align: attribute work more like in rst2html. Default: %s'
        % def_floating_images,
        dest='floating_images',
    )

    def_numbered_links = config.getValue("general", "numbered_links", False)
    parser.add_option(
        '--use-numbered-links',
        action='store_true',
        default=def_numbered_links,
        help='When using numbered sections, adds the numbers to all links referring to the section headers. Default: %s'
        % def_numbered_links,
        dest='numbered_links',
    )

    parser.add_option(
        '--strip-elements-with-class',
        action='append',
        dest='strip_elements_with_classes',
        metavar='CLASS',
        help='Remove elements with this CLASS from the output. Can be used multiple times.',
    )

    return parser
def main(_args=None):
    """Parse command line and call createPdf with the correct data."""
    parser = parse_commandline()
    # Fix issue 430: don't overwrite args
    # need to parse_args to see if we have a custom config file
    options, args = parser.parse_args(copy(_args))

    if options.configfile:
        # If there is a config file, we need to reparse
        # the command line because we have different defaults
        config.parseConfig(options.configfile)
        parser = parse_commandline()
        options, args = parser.parse_args(copy(_args))

    if options.version:
        from rst2pdf import version

        print(version)
        sys.exit(0)

    if options.quiet:
        log.setLevel(logging.CRITICAL)

    if options.verbose:
        log.setLevel(logging.INFO)

    if options.vverbose:
        log.setLevel(logging.DEBUG)

    if options.printssheet:
        # find base path
        if hasattr(sys, 'frozen'):
            # Frozen (e.g. PyInstaller) executables keep data next to the binary.
            PATH = abspath(dirname(sys.executable))
        else:
            PATH = abspath(dirname(__file__))
        with open(join(PATH, 'styles', 'styles.yaml')) as fh:
            print(fh.read())
        sys.exit(0)

    filename = False

    if len(args) == 0:
        # No positional args: read from stdin.
        args = [
            '-',
        ]
    elif len(args) > 2:
        log.critical('Usage: %s [ file.txt [ file.pdf ] ]', sys.argv[0])
        sys.exit(1)
    elif len(args) == 2:
        if options.output:
            log.critical('You may not give both "-o/--output" and second argument')
            sys.exit(1)
        options.output = args.pop()

    close_infile = False
    if args[0] == '-':
        infile = sys.stdin
        options.basedir = os.getcwd()
    elif len(args) > 1:
        log.critical('Usage: %s file.txt [ -o file.pdf ]', sys.argv[0])
        sys.exit(1)
    else:
        filename = args[0]
        options.basedir = os.path.dirname(os.path.abspath(filename))
        try:
            infile = open(filename, 'rb')
            close_infile = True
        except IOError as e:
            log.error(e)
            sys.exit(1)
    options.infile = infile

    if options.output:
        outfile = options.output
        if outfile == '-':
            outfile = sys.stdout.buffer
            options.compressed = False
            # we must stay quiet
            log.setLevel(logging.CRITICAL)
    else:
        if filename:
            if filename.endswith('.txt') or filename.endswith('.rst'):
                outfile = filename[:-4] + '.pdf'
            else:
                outfile = filename + '.pdf'
        else:
            outfile = sys.stdout.buffer
            options.compressed = False
            # we must stay quiet
            log.setLevel(logging.CRITICAL)
            # /reportlab/pdfbase/pdfdoc.py output can
            # be a callable (stringio, stdout ...)
    options.outfile = outfile

    # Accumulate the (possibly repeated) -s options into one flat list.
    ssheet = []
    if options.style:
        for l in options.style:
            ssheet += l.split(',')
    else:
        ssheet = []
    options.style = [x for x in ssheet if x]

    fpath = []
    if options.fpath:
        fpath = options.fpath.split(os.pathsep)
    if options.ffolder:
        fpath.append(options.ffolder)
    options.fpath = fpath

    spath = []
    if options.stylepath:
        spath = options.stylepath.split(os.pathsep)
    options.stylepath = spath

    if options.real_footnotes:
        # Real footnotes imply inline placement during processing.
        options.inline_footnotes = True

    if reportlab.Version < '3.0':
        log.warning(
            'You are using Reportlab version %s.'
            ' The suggested version is 3.0 or higher' % reportlab.Version
        )

    if options.invariant:
        patch_PDFDate()
        patch_digester()

    add_extensions(options)

    return_code = RstToPdf(
        stylesheets=options.style,
        language=options.language,
        header=options.header,
        footer=options.footer,
        inlinelinks=options.inlinelinks,
        breaklevel=int(options.breaklevel),
        baseurl=options.baseurl,
        fit_mode=options.fit_mode,
        background_fit_mode=options.background_fit_mode,
        smarty=str(options.smarty),
        font_path=options.fpath,
        style_path=options.stylepath,
        repeat_table_rows=options.repeattablerows,
        footnote_backlinks=options.footnote_backlinks,
        inline_footnotes=options.inline_footnotes,
        real_footnotes=options.real_footnotes,
        def_dpi=int(options.def_dpi),
        basedir=options.basedir,
        show_frame=options.show_frame,
        splittables=options.splittables,
        blank_first_page=options.blank_first_page,
        first_page_on_right=options.first_page_on_right,
        breakside=options.breakside,
        custom_cover=options.custom_cover,
        floating_images=options.floating_images,
        numbered_links=options.numbered_links,
        raw_html=options.raw_html,
        section_header_depth=int(options.section_header_depth),
        strip_elements_with_classes=options.strip_elements_with_classes,
    ).createPdf(
        text=options.infile.read(),
        source_path=options.infile.name,
        output=options.outfile,
        compressed=options.compressed,
    )

    if close_infile:
        infile.close()

    sys.exit(return_code)
# Ugly hack that fixes Issue 335: ReportLab's ImageReader holds state that
# does not survive deepcopy; fall back to a shallow copy of the reader.
reportlab.lib.utils.ImageReader.__deepcopy__ = lambda self, *x: copy(self)
def patch_digester():
    """Replace reportlab's image digester with a deterministic one.

    The replacement assigns each distinct input a sequential index in
    order of first appearance, so the generated names stay stable even
    when image filenames change between runs.
    """
    import reportlab.pdfgen.canvas as canvas

    seen = {}

    def _digester(s):
        if s not in seen:
            seen[s] = len(seen)
        return 'rst2pdf_image_%s' % seen[s]

    canvas._digester = _digester
def patch_PDFDate():
    '''Patch reportlab.pdfdoc.PDFDate so the invariant dates work correctly'''
    from reportlab.pdfbase import pdfdoc
    import reportlab

    class PDFDate(pdfdoc.PDFObject):
        __PDFObject__ = True

        # gmt offset now supported
        def __init__(self, invariant=True, ts=None, dateFormatter=None):
            # Fixed timestamp (2000-01-01 00:00:00, zero offset) so that
            # repeated runs produce byte-identical PDFs.
            now = (2000, 0o1, 0o1, 00, 00, 00, 0)
            self.date = now[:6]
            self.dateFormatter = dateFormatter

        def format(self, doc):
            dfmt = self.dateFormatter or (
                lambda yyyy, mm, dd, hh, m, s: "D:%04d%02d%02d%02d%02d%02d%+03d'%02d'"
                % (yyyy, mm, dd, hh, m, s, 0, 0)
            )
            return pdfdoc.format(pdfdoc.PDFString(dfmt(*self.date)), doc)

    pdfdoc.PDFDate = PDFDate
    # Also tell reportlab itself to suppress run-dependent output.
    reportlab.rl_config.invariant = 1
def add_extensions(options):
    """Import and install the extension modules requested via -e.

    An extension name prefixed with '!' removes a previously listed
    extension instead of adding one.  Each remaining extension is imported
    (trying ``<name>_r2p`` first when no path was given) and, if it defines
    an ``install`` callable, installed with a proxy to this module's
    globals.
    """
    extensions = []
    for ext in options.extensions:
        if not ext.startswith('!'):
            extensions.append(ext)
            continue
        # '!name' removes a previously requested extension.
        ext = ext[1:]
        try:
            extensions.remove(ext)
        except ValueError:
            log.warning(
                'Could not remove extension %s -- no such extension installed' % ext
            )
        else:
            log.info('Removed extension %s' % ext)

    options.extensions[:] = extensions
    if not extensions:
        return

    class ModuleProxy(object):
        # Gives extensions attribute-style access to this module's globals.
        def __init__(self):
            self.__dict__ = globals()

    createpdf = ModuleProxy()
    for modname in options.extensions:
        prefix, modname = os.path.split(modname)
        path_given = prefix
        if modname.endswith('.py'):
            modname = modname[:-3]
            path_given = True
        if not prefix:
            # Default location: the bundled extensions folder.
            prefix = os.path.join(os.path.dirname(__file__), 'extensions')
        if prefix not in sys.path:
            sys.path.append(prefix)
        prefix = os.getcwd()
        if prefix not in sys.path:
            sys.path.insert(0, prefix)
        log.info('Importing extension module %s', repr(modname))
        # Without an explicit path, prefer the '<name>_r2p' naming scheme.
        firstname = path_given and modname or (modname + '_r2p')
        _names = [firstname, modname]
        import_exc = None
        for _name in _names:
            try:
                module = import_module(_name, 'rst2pdf')
                break
            except ImportError as e:
                import_exc = e
        else:
            # Neither candidate imported.
            if not import_exc:
                continue
            # Re-raise if the failure was in a transitive import rather
            # than the extension module itself.
            if str(import_exc).split()[-1].replace("'", '') not in [firstname, modname]:
                raise import_exc
            raise SystemExit(
                '\nError: Could not find module %s in sys.path [\n %s\n]\n'
                'Exiting...\n' % (modname, ',\n '.join(sys.path))
            )

        if hasattr(module, 'install'):
            module.install(createpdf, options)
def publish_secondary_doctree(text, main_tree, source_path):
    """Publish ``text`` as a doctree that inherits the main document's
    substitutions.

    This is a hack so the text substitutions defined in the document are
    available when we process the cover page. See Issue 322.
    """
    dt = main_tree

    # Add substitutions from the main doctree
    class addSubsts(Transform):
        # Priority chosen to run with the other substitution transforms.
        default_priority = 219

        def apply(self):
            self.document.substitution_defs.update(dt.substitution_defs)
            self.document.substitution_names.update(dt.substitution_names)

    # Use an own reader to modify transformations done.
    class Reader(standalone.Reader):
        def get_transforms(self):
            default = standalone.Reader.get_transforms(self)
            return default + [
                addSubsts,
            ]

    # End of Issue 322 hack
    return docutils.core.publish_doctree(text, reader=Reader(), source_path=source_path)
if __name__ == "__main__":
main(sys.argv[1:])
| mit | c1a2062d6e86af9332123a9874e4592e | 32.973698 | 125 | 0.547385 | 4.195325 | false | false | false | false |
kivy/python-for-android | pythonforandroid/recipes/android/src/android/mixer.py | 5 | 6326 | # This module is, as much a possible, a clone of the pygame
# mixer api.
import android._android_sound as sound
import time
import threading
import os
condition = threading.Condition()
def periodic():
    """Run per-channel housekeeping on every allocated channel."""
    for channel_id in range(num_channels):
        channel = channels.get(channel_id)
        if channel is not None:
            channel.periodic()
num_channels = 8
reserved_channels = 0
def init(frequency=22050, size=-16, channels=2, buffer=4096):
    # Arguments are accepted only for pygame.mixer API compatibility;
    # no explicit initialization is needed here.
    return None


def pre_init(frequency=22050, size=-16, channels=2, buffersize=4096):
    # No-op, kept for pygame.mixer API compatibility.
    return None


def quit():
    # NOTE: intentionally shadows the builtin to mirror pygame.mixer.quit.
    stop()
    return None


def stop():
    # Stop playback on every channel.
    for i in range(0, num_channels):
        sound.stop(i)


def pause():
    # Pause playback on every channel.
    for i in range(0, num_channels):
        sound.pause(i)


def unpause():
    # Resume playback on every channel.
    for i in range(0, num_channels):
        sound.unpause(i)


def get_busy():
    # True if any channel is currently playing.
    for i in range(0, num_channels):
        if sound.busy(i):
            return True

    return False


def fadeout(time):
    # Fadeout doesn't work - it just immediately stops playback.
    stop()
# A map from channel number to Channel object.
channels = {}
def set_num_channels(count):
    """Set the total number of playback channels."""
    global num_channels
    num_channels = count


def get_num_channels(count=None):
    """Return the total number of playback channels.

    ``count`` is unused: pygame's ``mixer.get_num_channels`` takes no
    arguments, but the parameter is kept (now optional) so existing
    callers that passed a value keep working.
    """
    return num_channels


def set_reserved(count):
    """Reserve the first ``count`` channels from automatic selection."""
    global reserved_channels
    reserved_channels = count
def find_channel(force=False):
    """Return a free non-reserved Channel, or None.

    If every channel is busy and ``force`` is true, the channel that has
    been playing the longest is returned instead of None.
    """
    busy = []

    for i in range(reserved_channels, num_channels):
        c = Channel(i)

        if not c.get_busy():
            return c

        busy.append(c)

    if not force:
        return None

    # Oldest play_time == playing the longest; steal that channel.
    busy.sort(key=lambda x: x.play_time)
    return busy[0]
class ChannelImpl(object):
    """One playback channel backed by android._android_sound.

    Mirrors the pygame.mixer.Channel API.
    """

    def __init__(self, id):
        self.id = id
        self.loop = None       # Sound to re-queue for looping playback.
        self.queued = None     # Sound queued behind the current one.
        self.play_time = time.time()  # When play() was last called.

    def periodic(self):
        """Housekeeping called from the module-level periodic() pump."""
        qd = sound.queue_depth(self.id)

        if qd < 2:
            self.queued = None

        if self.loop is not None and sound.queue_depth(self.id) < 2:
            # Re-arm the looping sound without clearing the loop marker.
            self.queue(self.loop, loops=1)

    def play(self, s, loops=0, maxtime=0, fade_ms=0):
        if loops:
            self.loop = s
        sound.play(self.id, s.file, s.serial)
        self.play_time = time.time()

        with condition:
            condition.notify()

    def seek(self, position):
        sound.seek(self.id, position)

    def stop(self):
        self.loop = None
        sound.stop(self.id)

    def pause(self):
        sound.pause(self.id)

    def unpause(self):
        # BUG FIX: this previously called sound.pause(), so unpause()
        # paused the channel instead of resuming playback.
        sound.unpause(self.id)

    def fadeout(self, time):
        # No fadeout support on this backend; stop immediately.
        self.stop()

    def set_volume(self, left, right=None):
        # ``right`` is ignored; the backend has a single per-channel volume.
        sound.set_volume(self.id, left)

    def get_volume(self):
        return sound.get_volume(self.id)

    def get_busy(self):
        return sound.busy(self.id)

    def get_sound(self):
        """Return the Sound currently playing on this channel, if any."""
        is_busy = sound.busy(self.id)

        if not is_busy:
            return

        serial = sound.playing_name(self.id)

        if not serial:
            return

        return sounds.get(serial, None)

    def queue(self, s, loops=0):
        """Queue Sound ``s`` to play when the current sound finishes.

        ``loops`` is nonzero only when called from periodic() to re-arm a
        looping sound; BUG FIX: previously queue() took no ``loops``
        argument, so periodic()'s ``self.queue(self.loop, loops=1)`` call
        raised TypeError.  A normal queue (loops=0) cancels looping, as
        before.
        """
        if not loops:
            self.loop = None
        self.queued = s
        # BUG FIX: Sound instances have no ``what`` attribute (the original
        # referenced ``s.what``); queue the sound's file object instead,
        # matching play().
        sound.queue(self.id, s.file, s.serial)

        with condition:
            condition.notify()

    def get_queue(self):
        return self.queued

    def get_pos(self):
        # Backend reports milliseconds; the pygame API uses seconds.
        return sound.get_pos(self.id) / 1000.

    def get_length(self):
        return sound.get_length(self.id) / 1000.
def Channel(n):
    """Return the channel object for number ``n``, creating it on first use."""
    if n not in channels:
        channels[n] = ChannelImpl(n)
    return channels[n]
sound_serial = 0
sounds = {}
class Sound(object):
    """A loaded sound, mirroring pygame.mixer.Sound.

    NOTE(review): this class uses the Python 2 ``file`` builtin (see the
    noqa markers), so the module appears to target Python 2 on Android.
    """

    def __init__(self, what):
        # Doesn't support buffers.

        global sound_serial

        self._channel = None
        self._volume = 1.
        # Unique serial so the native layer can identify this sound.
        self.serial = str(sound_serial)
        sound_serial += 1

        if isinstance(what, file):  # noqa F821
            self.file = what
        else:
            self.file = file(os.path.abspath(what), "rb")  # noqa F821

        # Register globally so channels can map serial -> Sound.
        sounds[self.serial] = self

    def play(self, loops=0, maxtime=0, fade_ms=0):
        # avoid new play if the sound is already playing
        # -> same behavior as standard pygame.
        if self._channel is not None:
            if self._channel.get_sound() is self:
                return

        self._channel = channel = find_channel(True)
        channel.set_volume(self._volume)
        channel.play(self, loops=loops)
        return channel

    def stop(self):
        # Stop every channel currently playing this sound.
        for i in range(0, num_channels):
            if Channel(i).get_sound() is self:
                Channel(i).stop()

    def fadeout(self, time):
        # No fadeout support; stop immediately.
        self.stop()

    def set_volume(self, left, right=None):
        # ``right`` is ignored; the backend has a single volume per channel.
        self._volume = left
        if self._channel:
            if self._channel.get_sound() is self:
                self._channel.set_volume(self._volume)

    def get_volume(self):
        return self._volume

    def get_num_channels(self):
        # Number of channels currently playing this sound.
        rv = 0

        for i in range(0, num_channels):
            if Channel(i).get_sound() is self:
                rv += 1

        return rv

    def get_length(self):
        # Length is not exposed by the backend; fixed placeholder value.
        return 1.0
music_channel = Channel(256)
music_sound = None
class music(object):
    """Module-level music API, mirroring pygame.mixer.music.

    All playback goes through a dedicated channel (id 256) so music never
    competes with the sound-effect channels.
    """

    @staticmethod
    def load(filename):
        music_channel.stop()

        global music_sound
        music_sound = Sound(filename)

    @staticmethod
    def play(loops=0, start=0.0):
        # No start.
        music_channel.play(music_sound, loops=loops)

    @staticmethod
    def rewind():
        # Restart the currently loaded music from the beginning.
        music_channel.play(music_sound)

    @staticmethod
    def seek(position):
        music_channel.seek(position)

    @staticmethod
    def stop():
        music_channel.stop()

    @staticmethod
    def pause():
        music_channel.pause()

    @staticmethod
    def unpause():
        music_channel.unpause()

    @staticmethod
    def fadeout(time):
        music_channel.fadeout(time)

    @staticmethod
    def set_volume(value):
        music_channel.set_volume(value)

    @staticmethod
    def get_volume():
        return music_channel.get_volume()

    @staticmethod
    def get_busy():
        return music_channel.get_busy()

    @staticmethod
    def get_pos():
        return music_channel.get_pos()

    @staticmethod
    def queue(filename):
        return music_channel.queue(Sound(filename))
| mit | 323be6ac1288a38251c9416cbee1adc9 | 18.524691 | 70 | 0.577774 | 3.680047 | false | false | false | false |
luanfonceca/speakerfight | deck/migrations/0012_auto_20151211_1354.py | 4 | 1446 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations: adjusts field definitions
    # (choices, max lengths, blank flags) on Activity, Event and Proposal.

    dependencies = [
        ('deck', '0011_auto_20150825_1628'),
    ]

    operations = [
        migrations.AlterField(
            model_name='activity',
            name='activity_type',
            field=models.CharField(default=b'proposal', max_length=50, verbose_name='Type', choices=[(b'proposal', 'Proposal'), (b'workshop', 'Workshop'), (b'openning', 'Openning'), (b'coffee-break', 'Coffee Break'), (b'lunch', 'Lunch'), (b'lightning-talks', 'Lightning Talks'), (b'ending', 'Ending')]),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='activity',
            name='description',
            field=models.TextField(max_length=10000, verbose_name='Description', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='event',
            name='description',
            field=models.TextField(max_length=10000, verbose_name='Description', blank=True),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='proposal',
            name='activity_ptr',
            field=models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='deck.Activity'),
            preserve_default=True,
        ),
    ]
bd-j/prospector | prospect/plotting/sfh.py | 1 | 17351 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from argparse import Namespace
from copy import deepcopy
import numpy as np
from scipy.special import gamma, gammainc
from ..models.transforms import logsfr_ratios_to_masses
from ..sources.constants import cosmo
from .corner import quantile
__all__ = ["params_to_sfh", "parametric_pset",
"parametric_cmf", "parametric_mwa", "parametric_sfr",
"compute_mass_formed",
"ratios_to_sfrs", "sfh_quantiles",
"sfh_to_cmf", "nonpar_mwa", "nonpar_recent_sfr"]
def params_to_sfh(params, time=None, agebins=None):
    """Convert a dictionary of SFH parameter samples to SFR(t) and CMF(t).

    :param params: dict
        Posterior-sample parameter vectors.  For a parametric SFH it must
        contain "tau", "tage", "mass" and "sfh"; for a non-parametric SFH
        it must contain "logmass" and "logsfr_ratios".

    :param time: (optional, ndarray)
        Time vector (Gyr).  Supplying it selects the parametric branch.

    :param agebins: (optional, ndarray)
        log10(yr) age bin edges; required for the non-parametric branch.

    :returns lookback, sfhs, cmfs:
        Lookback times, SFR samples and cumulative-mass-fraction samples.
    """
    # A time vector signals a parametric (tau-model) SFH.
    parametric = (time is not None)

    if parametric:
        taus, tages, masses = params["tau"], params["tage"], params["mass"]
        sfhs = []
        cmfs = []
        for tau, tage, mass in zip(taus, tages, masses):
            sfpar = dict(tau=tau, tage=tage, mass=mass, sfh=params["sfh"])
            sfhs.append(parametric_sfr(times=time, tavg=0, **sfpar))
            cmfs.append(parametric_cmf(times=time, **sfpar))
        lookback = time.max() - time
        sfhs = np.array(sfhs)
        cmfs = np.array(cmfs)

    else:
        logmass = params["logmass"]
        logsfr_ratios = params["logsfr_ratios"]
        sfhs = np.array([ratios_to_sfrs(logm, sr, agebins)
                         for logm, sr in zip(logmass, logsfr_ratios)])
        cmfs = sfh_to_cmf(sfhs, agebins)
        # Convert bin edges from log10(yr) to Gyr.
        lookback = 10**(agebins-9)

    return lookback, sfhs, cmfs
def parametric_pset(logmass=None, **sfh):
    """Convert a dicionary of FSPS parametric SFH parameters into a
    namespace, making sure they are at least 1d vectors

    :param logmass: (optional, float)
        If given, overrides any "mass" entry in ``sfh`` with 10**logmass.

    :param sfh: dicionary
        FSPS parameteric SFH parameters

    :returns pset:
        A Namespace instance with attributes giving the SFH parameters
    """
    # TODO: make multiple psets if any of the SFH parameters have np.size() > 1
    vectors = ["mass", "sf_start", "tage", "tau", "const", "fburst", "tburst", "sf_trunc", "sf_slope"]
    pset = Namespace(mass=1.0, sfh=4., sf_start=0, tage=1,
                     tau=1.,
                     const=0.,
                     fburst=0., tburst=1.,
                     sf_trunc=0, sf_slope=0.)
    # BUG FIX: test against None, not truthiness -- logmass=0.0 is a valid
    # value (1 Msun) and was previously ignored.
    if logmass is not None:
        sfh["mass"] = 10**logmass
    for k in vars(pset).keys():
        if k in sfh:
            setattr(pset, k, sfh[k])

    # vectorize
    for k in vectors:
        setattr(pset, k, np.atleast_1d(getattr(pset, k)))

    return pset
def sfh_quantiles(tvec, bins, sfrs, weights=None, q=[16, 50, 84]):
    """Compute quantiles of a binned SFH

    :param tvec: shape (ntime,)
        Vector of lookback times onto which the SFH will be interpolated.

    :param bins: shape (nsamples, nbin, 2)
        The age bins, in linear untis, same units as tvec

    :param sfrs: shape (nsamples, nbin)
        The SFR in each bin

    :param weights: (optional) shape (nsamples,)
        Sample weights; if given, weighted quantiles are computed.

    :param q: (optional) list of percentiles, default [16, 50, 84]

    :returns sfh_q: shape(nq, ntime)
        The quantiles of the SFHs at each lookback time in `tvec`
    """
    nsamples = bins.shape[0]
    # Flatten each sample's bin edges, and duplicate each bin's SFR onto
    # both of its edges, to get step-function (t, sfr) pairs.
    edge_times = bins.reshape(nsamples, -1)
    edge_sfrs = np.array([sfrs, sfrs]).transpose(1, 2, 0).reshape(nsamples, -1)
    # Interpolate each sample's step function onto the common time grid;
    # zero outside the covered range.
    sf = np.array([np.interp(tvec, t, s, left=0, right=0)
                   for t, s in zip(edge_times, edge_sfrs)])
    if weights is None:
        qq = np.percentile(sf, axis=0, q=q)
    else:
        qq = quantile(sf.T, q=np.array(q) / 100., weights=weights)
    return qq
def parametric_sfr(times=None, tavg=1e-3, tage=1, **sfh):
    """Return the SFR (Msun/yr) for the given parameters of a parametric SFH,
    optionally averaging the SFR over a short trailing timescale.

    :param times: (optional, ndarray)
        If given, a set of *lookback* times where you want to calculate the
        sfr, same units as `tau` and `tage`.  If not given, lookback time 0
        is used.

    :param tavg: (optional, float, default: 1e-3)
        If non-zero, average the SFR over the last `tavg` Gyr (helps capture
        bursts).  If zero, the instantaneous SFR is returned.

    :param tage: (optional, default: 1)
        Age of the SFH in Gyr.

    :param sfh: optional keywords
        FSPS parametric SFH parameters, e.g. sfh, tau, sf_trunc

    :returns sfr:
        SFR in M_sun/year at the requested lookback time(s), instantaneous
        or averaged over the last `tavg` Gyr.
    """
    lookback = np.array(tage) if times is None else times
    pset = parametric_pset(tage=tage, **sfh)
    fwd_time = tage - lookback
    sfr, mass_now = compute_mass_formed(fwd_time, pset)
    if tavg > 0:
        # Average SFR = (mass formed in the last tavg Gyr) / tavg, in Msun/yr
        _, mass_then = compute_mass_formed(fwd_time - tavg, pset)
        sfr = (mass_now - mass_then) / (tavg * 1e9)
    return sfr
def parametric_cmf(times=None, tage=1., **sfh):
    """Return the cumulative formed mass for the given parameters of a
    parametric SFH.

    :param times: (optional, ndarray)
        If given, a set of *lookback* times (relative to `tage`) where you
        want to calculate the formed mass, in Gyr.  If not given, the formed
        mass will be computed for a lookback time of 0.

    :param tage: (optional, float, default: 1.)
        Age of the SFH in Gyr.

    :param sfh: optional keywords
        FSPS parametric SFH parameters, e.g. sfh, tau, sf_trunc

    :returns mass: (ndarray)
        Mass formed up to the supplied lookback time, in units of M_sun.
        Same shape as `times`
    """
    if times is None:
        # Lookback time of 0, i.e. evaluate at the age of the SFH itself.
        # (previously read sfh["tage"], which raises KeyError because `tage`
        # is captured by the keyword argument, not the **sfh dict)
        times = np.array(tage)
    # Fixed typo: was `parametric_pset(tage=tage**sfh)` (float ** dict, a
    # TypeError); now mirrors parametric_sfr().
    pset = parametric_pset(tage=tage, **sfh)
    _, mass = compute_mass_formed(tage - times, pset)
    return mass
def parametric_mwa_numerical(tau=4, tage=13.7, power=1, n=1000):
    """Compute the mass-weighted age by direct numerical (trapezoidal)
    integration of the (delayed-)exponential SFH.

    :param tau:
        e-folding timescale, same units as `tage`.

    :param tage:
        Duration of star formation.

    :param power: (optional, default: 1)
        Use 0 for exponential decline, and 1 for t*e^{-t/tau} (delayed
        exponential decline)

    :param n: (optional, default: 1000)
        Number of points in the integration grid.

    :returns mwa:
        The mass-weighted age, tage minus the SFR-weighted mean time.
    """
    p = power + 1
    grid = np.linspace(0, tage, n)
    decay = np.exp(-grid / tau)
    # <t> = int t * psi(t) dt / int psi(t) dt, with psi(t) ~ t**power * e^(-t/tau)
    numer = np.trapz((grid**p) * decay, grid)
    denom = np.trapz(grid**(power) * decay, grid)
    return tage - numer / denom
def parametric_mwa(tau=4, tage=13.7, power=1):
    """Compute the mass-weighted age analytically for (delayed-)exponential
    SFHs, using incomplete gamma functions.

    :param tau:
        e-folding timescale, same units as `tage`.

    :param tage:
        Duration of star formation.

    :param power: (optional, default: 1)
        Use 0 for exponential decline, and 1 for t*e^{-t/tau} (delayed
        exponential decline)

    :returns mwa:
        The mass-weighted age, tage minus the SFR-weighted mean time.
    """
    x = tage / tau
    # <t> = tau * gamma_lower(power+2, x) / gamma_lower(power+1, x), written
    # in terms of the regularized lower incomplete gamma function gammainc.
    mean_time = gammainc(power+2, x) * gamma(power+2) / gammainc(power+1, x) * tau
    return tage - mean_time
def ratios_to_sfrs(logmass, logsfr_ratios, agebins):
    """Convert a total log mass and log SFR ratios into per-bin SFRs
    (scalar version: one sample at a time).
    """
    bin_masses = logsfr_ratios_to_masses(np.squeeze(logmass),
                                         np.squeeze(logsfr_ratios),
                                         agebins)
    # Linear width of each age bin in years (agebins are log10(yr) edges)
    bin_widths = (10**agebins[:, 1] - 10**agebins[:, 0])
    return bin_masses / bin_widths
def nonpar_recent_sfr(logmass, logsfr_ratios, agebins, sfr_period=0.1):
    """Average SFR over the last `sfr_period` Gyr, vectorized over samples.
    """
    bin_masses = np.array([logsfr_ratios_to_masses(np.squeeze(logm), np.squeeze(sr), agebins)
                           for logm, sr in zip(logmass, logsfr_ratios)])
    edges_gyr = 10**(agebins - 9)
    # Fraction of each age bin covered by the window [0, sfr_period]
    frac = np.clip((sfr_period - edges_gyr[:, 0]) / (edges_gyr[:, 1] - edges_gyr[:, 0]), 0., 1)
    mass_in_window = (frac * bin_masses).sum(axis=-1)
    # Convert from Msun per sfr_period (Gyr) to Msun/yr
    return mass_in_window / (sfr_period * 1e9)
def nonpar_mwa(logmass, logsfr_ratios, agebins):
    """Mass-weighted age in Gyr for non-parametric SFHs, vectorized over
    samples.
    """
    sfrs = np.array([ratios_to_sfrs(logm, sr, agebins)
                     for logm, sr in zip(logmass, logsfr_ratios)])
    edges = 10**(agebins)
    # Integral of t dt over each bin: (t_hi^2 - t_lo^2) / 2, in yr^2
    tbar_dt = (edges[:, 1]**2 - edges[:, 0]**2) / 2
    mwa_yr = np.array([(tbar_dt * sfr).sum() / 10**logm
                       for sfr, logm in zip(sfrs, logmass)])
    return mwa_yr / 1e9
def sfh_to_cmf(sfrs, agebins):
    """Convert binned SFRs into a normalized cumulative mass fraction as a
    function of lookback time.

    :param sfrs: shape (..., nbin)
        SFR in each age bin (Msun/yr).

    :param agebins: shape (nbin, 2)
        log10(yr) edges of the age bins.

    :returns ages: shape (nbin + 1,)
        Bin-edge lookback times in Gyr.

    :returns cmf: shape (..., nbin + 1)
        Fraction of the total mass formed by each lookback time: 1 at the
        youngest edge, 0 at the oldest.
    """
    sfrs = np.atleast_2d(sfrs)
    linear_widths = (10**agebins[:, 1] - 10**agebins[:, 0])
    # Mass per bin, ordered oldest-first so the cumsum runs forward in time
    masses_oldest_first = (sfrs * linear_widths)[..., ::-1]
    cumulative = masses_oldest_first.cumsum(axis=-1)
    # Normalize by the total mass formed
    cumulative = cumulative / cumulative[..., -1][..., None]
    # Prepend zero formed mass at the oldest edge
    pad_shape = list(cumulative.shape[:-1]) + [1]
    cumulative = np.append(np.zeros(pad_shape), cumulative, axis=-1)
    edges_gyr = 10**(np.array(agebins) - 9)
    ages = np.array(edges_gyr[:, 0].tolist() + [edges_gyr[-1, 1]])
    # Flip back to lookback-time (youngest-first) ordering
    return ages, np.squeeze(cumulative[..., ::-1])
def compute_mass_formed(times, pset):
    """Compute the SFR and stellar mass formed in a parametric SFH,
    as a function of (forward) time.

    The linear portion of the Simha SFH (sfh=5) is defined as:
        psi(t) = psi_trunc + psi_trunc * sf_slope * (t - sf_trunc)
    where psi_trunc is the SFR of the delay-tau SFH at time sf_trunc

    :param times: ndarray of shape (nt,)
        Forward time in Gyr.  Use times = pset.tage - t_lookback to
        convert from lookback times

    :param pset: Namespace instance
        The FSPS SFH parameters, assumed to be scalar or 1 element 1-d arrays.
        Usually the output of parametric_pset()

    :returns sfr: ndarray of shape (nt,)
        The instantaneous SFR in M_sun/yr at each of `times`

    :returns mformed: ndarray of shape (nt,)
        The total stellar mass formed from t=0 to `times`, in units of M_sun
    """
    # TODO: use broadcasting to deal with multiple sfhs?
    # Work in time since the start of star formation (subtract sf_start)
    tmass = pset.tage - pset.sf_start # the age at which the mass is *specified*
    tprime = times - pset.sf_start # the ages at which sfr and formed mass are requested
    if pset.sfh == 3:
        raise NotImplementedError("This method does not support tabular SFH")
    if np.any(tmass < 0):
        raise ValueError("SF never started (tage - sf_start < 0) for at least one input")
    if (pset.const + pset.fburst) > 1:
        raise ValueError("Constant and burst fractions combine to be > 1")
    # NOTE(review): for sfh values > 0 other than 1, 4, or 5 (e.g. sfh=2),
    # `mfrac` is never assigned and the return below raises NameError --
    # confirm the intended input domain.
    if (pset.sfh == 0):
        # SSPs: all mass forms instantaneously at t = tmass
        mfrac = 1.0 * (tprime > tmass)
        sfr = np.zeros_like(tprime) # actually the sfr is infinity
    elif pset.sfh > 0:
        # Compute tau model component, for SFH=1,4,5
        #
        # Integration limits are from 0 to tmax and 0 to tprime, where
        #  - tmass is the tage, and
        #  - tprime is the given `time`,
        #  - ttrunc is where the delay-tau truncates
        ttrunc, tend = np.max(tprime), tmass
        if (pset.sf_trunc > 0) and (pset.sf_trunc > pset.sf_start):
            # Truncation is active: integrate the tau model only to
            # sf_trunc - sf_start
            ttrunc = pset.sf_trunc - pset.sf_start
            tend = min(tmass, ttrunc)
        # Now integrate to get mass formed by Tprime and by Tmax, dealing with
        # truncation that happens after sf_start but before Tmax and/or Tprime.
        # power=1 (exponential) for sfh <= 3, power=2 (delayed, t*exp) for
        # sfh > 3; gammainc is assumed to be scipy.special.gammainc, the
        # *regularized* lower incomplete gamma function -- confirm the import.
        power = 1 + int(pset.sfh > 3)
        total_mass_tau = pset.tau * gammainc(power, tend / pset.tau)
        tt = np.clip(tprime, 0, ttrunc)
        mass_tau = (tprime > 0.) * pset.tau * gammainc(power, tt / pset.tau)
        # The (unnormalized) SFR of the tau component at Tprime; zero before
        # sf_start and after truncation
        sfr_tau = (tprime > 0.) * (tprime <= ttrunc) * (tprime / pset.tau)**(power-1.) * np.exp(-tprime / pset.tau)
        # fraction of tau component mass formed by tprime
        mfrac_tau = mass_tau / total_mass_tau
        # Add the constant and burst portions, for SFH=1,4.
        if ((pset.sfh == 1) or (pset.sfh == 4)):
            # Fraction of the burst mass formed by Tprime (step function)
            tburst = (pset.tburst - pset.sf_start)
            mfrac_burst = 1.0 * (tprime > tburst)
            # SFR from constant portion at Tprime (integrates to 1 at tmax)
            sfr_const = (tprime > 0) * 1.0 / tmass
            # fraction of constant mass formed by tprime
            mfrac_const = np.clip(tprime, 0, ttrunc) * sfr_const
            # Add formed mass fractions for each component, weighted by component fractions.
            # Fraction of the constant mass formed by Tprime is just Tprime/Tmax
            # TODO : The FSPS source does not include the tburst < tmass logic....
            mfrac = ((1. - pset.const - pset.fburst * (tburst < tmass)) * mfrac_tau +
                     pset.const * mfrac_const +
                     pset.fburst * mfrac_burst)
            # N.B. for Tprime = tburst, sfr is infinite, but we ignore that case.
            sfr = ((1. - pset.const - pset.fburst) * sfr_tau / total_mass_tau + pset.const * sfr_const)
            # Zero the SFR after truncation
            sfr *= (tprime <= ttrunc)
        # Add the linear portion, for Simha, SFH=5.
        # This is the integral of sfr_trunc*(1 - m * (T - Ttrunc)) from Ttrunc to Tz
        elif (pset.sfh == 5):
            #raise NotImplementedError
            m = -pset.sf_slope
            if (m > 0):
                # find time at which SFR=0, if m>0
                Tz = ttrunc + 1.0 / m
            else:
                # m <= 0 will never reach SFR=0
                Tz = np.max(tprime)
            # Logic for Linear portion
            if (ttrunc < 0):
                # Truncation does not occur during the SFH.
                total_mass_linear = 0.
                mass_linear = 0.
                sfr = sfr_tau / total_mass_tau
            else:
                # Truncation does occur, integrate linear to zero crossing or tage.
                Thi = min(Tz, tmass)
                # SFR of the delay-tau model at the truncation time
                # (power is 2 in this branch, so (t/tau)**(power-1) = t/tau)
                sfr_trunc = (ttrunc/pset.tau) * np.exp(-ttrunc / pset.tau)
                total_mass_linear = (Thi > ttrunc) * sfr_trunc * linear_mass(Thi, ttrunc, m)
                mass_linear = (tprime > ttrunc) * sfr_trunc * linear_mass(tprime, ttrunc, m)
                # No mass forms after the zero crossing; clamp to the value at Tz
                mass_linear[tprime > Tz] = sfr_trunc * linear_mass(Tz, ttrunc, m)
                # SFR in linear portion, nonzero only between ttrunc and Tz
                sfr = sfr_trunc * (1 - m * (tprime - ttrunc)) / (total_mass_tau + total_mass_linear)
                sfr *= ((tprime > ttrunc) & (tprime <= Tz))
                # add portion for tau
                sfr[tprime <= ttrunc] = sfr_tau[tprime <= ttrunc] / (total_mass_tau + total_mass_linear)
            mfrac = (mass_tau + mass_linear) / (total_mass_tau + total_mass_linear)
    # Scale fractions by total mass; /1e9 converts SFR from Msun/Gyr to Msun/yr
    return pset.mass * sfr/1e9, pset.mass * mfrac
def linear_mass(t, ttrunc, m):
    """Integrate (1 - m*(a - ttrunc)) da from a=ttrunc to a=t.

    :param t:
        Upper limit of integration (scalar or ndarray).

    :param ttrunc:
        Lower limit of integration (the truncation time).

    :param m:
        Slope of the linear decline (the negative of sf_slope upstream).

    :returns integral:
        dt - (m/2) * dt**2, with dt = t - ttrunc.
    """
    dt = t - ttrunc
    # Algebraically simplified from (dt + ttrunc*m*dt) - m/2*(t**2 - ttrunc**2);
    # this form avoids the cancellation in t**2 - ttrunc**2 for large t.
    return dt - m/2. * dt**2
# Baseline delay-tau (sfh=4) parameter set for the test cases below;
# individual cases override entries via keyword arguments to show_par_sfh().
default_sfh = dict(mass=1.0, sfh=4, tage=1., tau=2.,
                   sf_start=0., sf_trunc=0., fburst=0., const=0., tburst=0.5)
def show_par_sfh(times, label="", axes=[], tavg=0.01, tol=1e-3, **params):
    """Plot the SFR and cumulative formed mass for one parametric SFH test
    case, and check that the SFR integrates to the total formed mass.

    :param times: ndarray
        Times (Gyr) at which to evaluate the SFH.
        NOTE(review): compute_mass_formed treats its argument as *forward*
        time while parametric_sfr treats its argument as *lookback* time --
        confirm this mix is intended for these plots.

    :param label:
        Name of the test case, used when printing the integration check.

    :param axes:
        Pair of matplotlib axes; SFR is drawn on axes[0], M(<t) on axes[1].

    :param tavg:
        If > 0, plot the SFR averaged over the last `tavg` Gyr instead of
        the instantaneous SFR.

    :param tol:
        Tolerance for the mass-conservation assertion.

    :param params:
        Overrides for entries of `default_sfh`.

    :returns mr:
        Ratio of the integrated SFR to the maximum formed mass (asserted to
        be ~1 for sfh > 0 cases without bursts).
    """
    sfh = deepcopy(default_sfh)
    sfh.update(params)
    pset = parametric_pset(**sfh)
    sfr, mass = compute_mass_formed(times, pset)
    sf_label = r"SFR"
    if tavg > 0:
        sfr = parametric_sfr(times, **sfh)
        sf_label = r"$\langle {{\rm SFR}}\rangle_{{{}}}$".format(tavg)
    ax = axes[0]
    ax.plot(times, sfr)
    ax.set_ylabel(sf_label)
    ax = axes[1]
    ax.plot(times, mass)
    ax.set_ylabel("M(<t)")
    # Horizontal line at the total mass; vertical lines mark notable epochs.
    ax.axhline(pset.mass, linestyle=":", color='k', linewidth=1.0, label="mass at tage")
    [ax.axvline(pset.tage, linestyle="--", color='firebrick', label="tage") for ax in axes]
    if pset.sf_trunc > 0:
        [ax.axvline(pset.sf_trunc, linestyle="--", color='seagreen', label="sf_trunc") for ax in axes]
    if pset.fburst > 0:
        [ax.axvline(pset.tburst, linestyle="--", color='darkorange', label="tburst") for ax in axes]
    if pset.sfh > 5:
        [ax.axvline(pset.sf_trunc, linestyle="--", label="tburst") for ax in axes]
    # Mass conservation: integral of SFR (Msun/yr) over time (Gyr) vs total mass
    mr = np.trapz(sfr, times) * 1e9 / mass.max()
    print("{}: {:0.6f}".format(label, mr))
    if (pset.sfh > 0) & (pset.fburst == 0):
        assert np.abs(mr - 1) < tol
    return mr
if __name__ == "__main__":
    # Exercise the parametric SFH machinery on a suite of edge cases, plotting
    # SFR and cumulative mass for each and asserting mass conservation where
    # applicable (inside show_par_sfh).

    # time array
    times = np.linspace(0, 5, 1000)
    # --- test cases ---
    # One row of axes per case; the last four rows are used by the Simha
    # (sfh=5) cases addressed via negative indices below.
    ncases = 14
    import matplotlib.pyplot as pl
    pl.ion()
    fig, axes = pl.subplots(ncases + 1, 2, sharex="row",
                            figsize=(8.5, ncases * 1.5))
    i = 0
    # default
    show_par_sfh(times, label="default", axes=axes[i])
    # sf start
    i += 1
    show_par_sfh(times, label="test SF start", axes=axes[i],
                 sf_start=1.0, tage=2.0)
    # with const
    i += 1
    show_par_sfh(times, label="with const", axes=axes[i],
                 const=0.5)
    # const w/ sf_start
    i += 1
    show_par_sfh(times, label="const w/ sf_start", axes=axes[i],
                 const=0.5, sf_start=1.0, tage=2.0)
    # const w/ sf_start & trunc
    i += 1
    show_par_sfh(times, label="const w/ sf_start & trunc", axes=axes[i],
                 const=0.5, sf_trunc=4., sf_start=1.0, tage=2.0)
    # pure const w/ sf_start & trunc (looser tolerance)
    i += 1
    show_par_sfh(times, label="pure const w/ sf_start & trunc", tol=5e-3, axes=axes[i],
                 const=1., sf_trunc=4., sf_start=1.0, tage=2.0)
    # burst before tage
    i += 1
    show_par_sfh(times, label="burst before tage", axes=axes[i],
                 fburst=0.5, tburst=0.5)
    # burst after tage
    i += 1
    show_par_sfh(times, label="burst after tage", axes=axes[i],
                 fburst=0.5, tburst=1.5)
    # burst at sf_start
    i += 1
    show_par_sfh(times, label="burst at sf_start", axes=axes[i],
                 fburst=0.5, tburst=1.5, sf_start=1.5, tage=3)
    # pure exp
    i += 1
    show_par_sfh(times, label="pure EXP", tol=1e-2, axes=axes[i],
                 sfh=1)
    # SSP
    i += 1
    show_par_sfh(times, label="SSP", axes=axes[i],
                 sfh=0)
    # --- Simha (sfh=5) cases, plotted on the last four rows ---
    # positive slope sf trunc before tage
    show_par_sfh(times, label="pos quench before tage", axes=axes[-4],
                 sfh=5, tage=3, sf_trunc=2, tau=0.5, sf_slope=1)
    # negative slope sf trunc before tage
    show_par_sfh(times, label="neg quench before tage", axes=axes[-3],
                 sfh=5, tage=2, sf_trunc=1.5, tau=3, sf_slope=-1)
    # positive slope sf trunc after tage
    show_par_sfh(times, label="pos quench after tage", axes=axes[-2],
                 sfh=5, sf_trunc=3, tau=1, sf_slope=1)
    # negative slope sf trunc after tage
    show_par_sfh(times, label="neg quench after tage", axes=axes[-1],
                 sfh=5, sf_trunc=3, tau=3, sf_slope=-1)
luanfonceca/speakerfight | api/serializers.py | 1 | 2924 | from django.contrib import auth
from rest_framework.reverse import reverse
from rest_framework import serializers
from deck.models import Event, Activity, Track
from deck.templatetags.deck_tags import get_user_photo
class UserSerializer(serializers.ModelSerializer):
full_name = serializers.SerializerMethodField()
photo = serializers.SerializerMethodField()
class Meta:
model = auth.get_user_model()
fields = ('full_name', 'photo')
def get_full_name(self, user):
return user.get_full_name() or user.username
def get_photo(self, user):
return get_user_photo(user)
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
author = UserSerializer(read_only=True)
timetable = serializers.SerializerMethodField()
url_api_event_activity = serializers.SerializerMethodField()
activity_type_display = serializers.SerializerMethodField()
class Meta:
model = Activity
fields = (
'title', 'slug', 'description', 'timetable',
'activity_type', 'author',
'start_timetable', 'end_timetable',
'url_api_event_activity', 'activity_type_display',
)
def get_timetable(self, activity):
return activity.timetable
def get_activity_type_display(self, activity):
return activity.get_activity_type_display()
def get_url_api_event_activity(self, activity):
event = activity.track.event
return reverse(
'api_event_activity',
[event.slug, activity.slug])
class CreateActivitySerializer(serializers.ModelSerializer):
author = UserSerializer(read_only=True)
timetable = serializers.SerializerMethodField()
url_api_event_activity = serializers.SerializerMethodField()
activity_type_display = serializers.SerializerMethodField()
class Meta:
model = Activity
fields = (
'pk', 'slug', 'title', 'description',
'timetable', 'activity_type', 'author',
'start_timetable', 'end_timetable',
'url_api_event_activity',
'activity_type_display',
)
def get_timetable(self, activity):
return activity.timetable
def get_activity_type_display(self, activity):
return activity.get_activity_type_display()
def get_url_api_event_activity(self, activity):
event = activity.track.event
return reverse(
'api_event_activity',
[event.slug, activity.slug])
class TrackSerializer(serializers.HyperlinkedModelSerializer):
activities = ActivitySerializer(read_only=True, many=True)
class Meta:
model = Track
fields = ('activities',)
class EventSerializer(serializers.HyperlinkedModelSerializer):
tracks = TrackSerializer(read_only=True, many=True)
class Meta:
model = Event
fields = ('title', 'description', 'tracks')
| mit | eecdc9dd330e4b5fa6ffbcee1b292c6d | 29.778947 | 65 | 0.665869 | 4.237681 | false | false | false | false |
bd-j/prospector | scripts/prospector_dynesty.py | 1 | 6908 | import time, sys, os
import numpy as np
np.errstate(invalid='ignore')
from prospect.models import model_setup
from prospect.io import write_results
from prospect import fitting
from prospect.likelihood import lnlike_spec, lnlike_phot, write_log
from dynesty.dynamicsampler import stopping_function, weight_function, _kld_error
from dynesty.utils import *
try:
import mpi4py
from mpi4py import MPI
from schwimmbad import MPIPool
mpi4py.rc.threads = False
mpi4py.rc.recv_mprobe = False
comm = MPI.COMM_WORLD
size = comm.Get_size()
withmpi = comm.Get_size() > 1
except ImportError:
withmpi = False
# --------------
# Read command line arguments
# --------------
sargv = sys.argv
argdict = {'param_file': ''}
clargs = model_setup.parse_args(sargv, argdict=argdict)
run_params = model_setup.get_run_params(argv=sargv, **clargs)
# --------------
# Globals
# --------------
# GP instances as global
spec_noise, phot_noise = model_setup.load_gp(**run_params)
# Model as global
global_model = model_setup.load_model(**run_params)
# Obs as global
global_obs = model_setup.load_obs(**run_params)
# SPS Model instance as global
sps = model_setup.load_sps(**run_params)
if withmpi:
# Run SPS over logzsol in order to get necessary data in cache/memory
# for each MPI process. Otherwise, you risk creating a lag between the MPI tasks
# cahcing data depending on where that task is in parameter space
# which can slow down the parallelization
initial_theta_grid = np.around(np.arange(global_model.config_dict["logzsol"]['prior'].range[0], global_model.config_dict["logzsol"]['prior'].range[1], step=0.01), decimals=2)
for theta_init in initial_theta_grid:
sps.ssp.params["sfh"] = global_model.params['sfh'][0]
sps.ssp.params["imf_type"] = global_model.params['imf_type'][0]
sps.ssp.params["logzsol"] = theta_init
sps.ssp._compute_csp()
# -----------------
# LnP function as global
# ------------------
def lnprobfn(theta, model=None, obs=None, verbose=run_params['verbose']):
"""Given a parameter vector and optionally a dictionary of observational
ata and a model object, return the ln of the posterior. This requires that
an sps object (and if using spectra and gaussian processes, a GP object) be
instantiated.
:param theta:
Input parameter vector, ndarray of shape (ndim,)
:param model:
bsfh.sedmodel model object, with attributes including ``params``, a
dictionary of model parameters. It must also have ``prior_product()``,
and ``mean_model()`` methods defined.
:param obs:
A dictionary of observational data. The keys should be
*``wavelength``
*``spectrum``
*``unc``
*``maggies``
*``maggies_unc``
*``filters``
* and optional spectroscopic ``mask`` and ``phot_mask``.
:returns lnp:
Ln posterior probability.
"""
if model is None:
model = global_model
if obs is None:
obs = global_obs
lnp_prior = model.prior_product(theta, nested=True)
if np.isfinite(lnp_prior):
# Generate mean model
try:
mu, phot, x = model.mean_model(theta, obs, sps=sps)
except(ValueError):
return -np.infty
# Noise modeling
if spec_noise is not None:
spec_noise.update(**model.params)
if phot_noise is not None:
phot_noise.update(**model.params)
vectors = {'spec': mu, 'unc': obs['unc'],
'sed': model._spec, 'cal': model._speccal,
'phot': phot, 'maggies_unc': obs['maggies_unc']}
# Calculate likelihoods
lnp_spec = lnlike_spec(mu, obs=obs, spec_noise=spec_noise, **vectors)
lnp_phot = lnlike_phot(phot, obs=obs, phot_noise=phot_noise, **vectors)
return lnp_phot + lnp_spec + lnp_prior
else:
return -np.infty
def prior_transform(u, model=None):
if model is None:
model = global_model
return model.prior_transform(u)
def halt(message):
"""Exit, closing pool safely.
"""
print(message)
try:
pool.close()
except:
pass
sys.exit(0)
if __name__ == "__main__":
# --------------
# Setup
# --------------
rp = run_params
rp['sys.argv'] = sys.argv
try:
rp['sps_libraries'] = sps.ssp.libraries
except(AttributeError):
rp['sps_libraries'] = None
# Use the globals
model = global_model
obs = global_obs
if rp.get('debug', False):
halt('stopping for debug')
# Try to set up an HDF5 file and write basic info to it
outroot = "{0}_{1}".format(rp['outfile'], int(time.time()))
odir = os.path.dirname(os.path.abspath(outroot))
if (not os.path.exists(odir)):
badout = 'Target output directory {} does not exist, please make it.'.format(odir)
halt(badout)
# -------
# Sample
# -------
if rp['verbose']:
print('dynesty sampling...')
tstart = time.time() # time it
if withmpi:
with MPIPool() as pool:
if not pool.is_master():
pool.wait()
sys.exit(0)
nprocs = pool.size
dynestyout = fitting.run_dynesty_sampler(lnprobfn, prior_transform, model.ndim,
pool=pool, queue_size=nprocs,
stop_function=stopping_function,
wt_function=weight_function,
**rp)
else:
pool = None
nprocs = 1
dynestyout = fitting.run_dynesty_sampler(lnprobfn, prior_transform, model.ndim,
pool=pool, queue_size=nprocs,
stop_function=stopping_function,
wt_function=weight_function,
**rp)
ndur = time.time() - tstart
print('done dynesty in {0}s'.format(ndur))
# -------------------------
# Output HDF5 (and pickles if asked for)
# -------------------------
if rp.get("output_pickles", False):
# Write the dynesty result object as a pickle
import pickle
with open(outroot + '_dns.pkl', 'w') as f:
pickle.dump(dynestyout, f)
# Write the model as a pickle
partext = write_results.paramfile_string(**rp)
write_results.write_model_pickle(outroot + '_model', model, powell=None,
paramfile_text=partext)
# Write HDF5
hfile = outroot + '_mcmc.h5'
write_results.write_hdf5(hfile, rp, model, obs, dynestyout,
None, tsample=ndur)
| mit | 726a67435a494e96a61e1e4976625bd8 | 31.739336 | 178 | 0.565431 | 3.721983 | false | false | false | false |
luanfonceca/speakerfight | core/models.py | 1 | 2691 |
from django.core.exceptions import AppRegistryNotReady
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext as _
from deck.models import Proposal
@python_2_unicode_compatible
class Profile(models.Model):
language = models.CharField(
_('Language'), choices=settings.LANGUAGES,
max_length=50, null=True, blank=False)
about_me = models.TextField(
_('About me'), max_length=500, null=True, blank=True)
github = models.CharField(
_('Github username'), max_length=50, null=True, blank=True)
facebook = models.CharField(
_('Facebook username'), max_length=50, null=True, blank=True)
twitter = models.CharField(
_('Twitter username'), max_length=50, null=True, blank=True)
site = models.URLField(
_('Site url'), max_length=200, null=True, blank=True)
image = models.ImageField(null=True, blank=True)
# relations
user = models.OneToOneField(to=settings.AUTH_USER_MODEL)
class Meta:
verbose_name = _('Profile')
def __str__(self):
return self.user.get_full_name()
def get_absolute_url(self):
return reverse_lazy(
'user_profile', kwargs={'user__username': self.user.username})
def get_github_url(self):
if self.github:
return 'https://github.com/{}'.format(self.github)
def get_facebook_url(self):
if self.facebook:
return 'https://facebook.com/{}'.format(self.facebook)
def get_twitter_url(self):
if self.twitter:
return 'https://twitter.com/{}'.format(self.twitter)
def get_site_url(self):
return self.site
def get_profile_events(self):
return self.user.events.filter(is_published=True)
def get_profile_proposals(self):
return Proposal.objects.filter(
author=self.user,
event__is_published=True,
event__anonymous_voting=False,
is_published=True,
)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
def slugify_user_username(sender, instance, **kwargs):
instance.username = instance.username.replace(' ', '_')
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except AppRegistryNotReady:
from django.contrib.auth.models import User
post_save.connect(create_user_profile, sender=User)
pre_save.connect(slugify_user_username, sender=User)
| mit | a3846a56230f9a8ca22b01a3de084cd2 | 30.290698 | 74 | 0.668153 | 3.806223 | false | false | false | false |
luanfonceca/speakerfight | organization/views.py | 1 | 1947 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.urlresolvers import reverse
from django.http import Http404
from django.utils.translation import ugettext as _
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from . models import Organization
from core.mixins import LoginRequiredMixin, FormValidRedirectMixing
class OwnerRequiredMixin(object):
def dispatch(self, *args, **kwargs):
"""Only owners can manage organizations."""
organization = self.get_object()
if self.request.user != organization.created_by \
and not self.request.user.is_superuser:
raise Http404
return super(OwnerRequiredMixin, self).dispatch(*args, **kwargs)
class BaseOrganizationView(LoginRequiredMixin, FormValidRedirectMixing):
model = Organization
fields = ['name', 'about']
template_Name = 'organization/organization_form.html'
def get_success_url(self):
return reverse('update_organization', kwargs={'slug': self.object.slug})
class CreateOrganization(BaseOrganizationView, CreateView):
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.created_by = self.request.user
self.object.save()
return self.success_redirect(_(u'Organization created.'))
class UpdateOrganization(OwnerRequiredMixin, BaseOrganizationView, UpdateView):
def form_valid(self, form):
self.object = form.save()
return self.success_redirect(_(u'Organization updated.'))
class DeleteOrganization(OwnerRequiredMixin, BaseOrganizationView, DeleteView):
template_Name = 'organization/organization_confirm_delete.html'
def form_valid(self, form):
return self.success_redirect(_(u'Organization deleted.'))
def get_success_url(self):
# TODO: Redirect to the organization list route when it gets done
return reverse('list_events')
| mit | ee6ae4420677d3a4530b4160bd6c3e85 | 33.767857 | 80 | 0.715973 | 4.288546 | false | false | false | false |
bd-j/prospector | scripts/prospector_restart.py | 3 | 7080 | #!/usr/local/bin/python
import time, sys, os
import numpy as np
np.errstate(invalid='ignore')
from prospect.models import model_setup
from prospect.io import write_results
from prospect.io import read_results as pr
from prospect import fitting
from prospect.likelihood import lnlike_spec, lnlike_phot, write_log, chi_spec, chi_phot
# --------------
# Read command line arguments
# --------------
sargv = sys.argv
argdict = {'restart_from': '', 'niter': 1024}
clargs = model_setup.parse_args(sargv, argdict=argdict)
# ----------
# Result object and Globals
# ----------
result, global_obs, global_model = pr.results_from(clargs["restart_from"])
is_emcee = (len(result["chain"].shape) == 3) & (result["chain"].shape[0] > 1)
assert is_emcee, "Result file does not have a chain of the proper shape."
# SPS Model instance (with libraries check)
sps = pr.get_sps(result)
run_params = result["run_params"]
run_params.update(clargs)
# Noise model (this should be doable via read_results)
from prospect.models.model_setup import import_module_from_string
param_file = (result['run_params'].get('param_file', ''),
result.get("paramfile_text", ''))
path, filename = os.path.split(param_file[0])
modname = filename.replace('.py', '')
user_module = import_module_from_string(param_file[1], modname)
spec_noise, phot_noise = user_module.load_gp(**run_params)
# -----------------
# LnP function as global
# ------------------
def lnprobfn(theta, model=None, obs=None, residuals=False,
verbose=run_params['verbose']):
"""Given a parameter vector and optionally a dictionary of observational
ata and a model object, return the ln of the posterior. This requires that
an sps object (and if using spectra and gaussian processes, a GP object) be
instantiated.
:param theta:
Input parameter vector, ndarray of shape (ndim,)
:param model:
bsfh.sedmodel model object, with attributes including ``params``, a
dictionary of model parameters. It must also have ``prior_product()``,
and ``mean_model()`` methods defined.
:param obs:
A dictionary of observational data. The keys should be
*``wavelength``
*``spectrum``
*``unc``
*``maggies``
*``maggies_unc``
*``filters``
* and optional spectroscopic ``mask`` and ``phot_mask``.
:returns lnp:
Ln posterior probability.
"""
if model is None:
model = global_model
if obs is None:
obs = global_obs
# Calculate prior probability and exit if not within prior
lnp_prior = model.prior_product(theta)
if not np.isfinite(lnp_prior):
return -np.infty
# Generate mean model
t1 = time.time()
try:
spec, phot, x = model.mean_model(theta, obs, sps=sps)
except(ValueError):
return -np.infty
d1 = time.time() - t1
# Return chi vectors for least-squares optimization
if residuals:
chispec = chi_spec(spec, obs)
chiphot = chi_phot(phot, obs)
return np.concatenate([chispec, chiphot])
# Noise modeling
if spec_noise is not None:
spec_noise.update(**model.params)
if phot_noise is not None:
phot_noise.update(**model.params)
vectors = {'spec': spec, 'unc': obs['unc'],
'sed': model._spec, 'cal': model._speccal,
'phot': phot, 'maggies_unc': obs['maggies_unc']}
# Calculate likelihoods
t2 = time.time()
lnp_spec = lnlike_spec(spec, obs=obs, spec_noise=spec_noise, **vectors)
lnp_phot = lnlike_phot(phot, obs=obs, phot_noise=phot_noise, **vectors)
d2 = time.time() - t2
if verbose:
write_log(theta, lnp_prior, lnp_spec, lnp_phot, d1, d2)
return lnp_prior + lnp_phot + lnp_spec
# -----------------
# MPI pool. This must be done *after* lnprob and
# chi2 are defined since slaves will only see up to
# sys.exit()
# ------------------
try:
from emcee.utils import MPIPool
pool = MPIPool(debug=False, loadbalance=True)
if not pool.is_master():
# Wait for instructions from the master process.
pool.wait()
sys.exit(0)
except(ImportError, ValueError):
pool = None
print('Not using MPI')
def halt(message):
"""Exit, closing pool safely.
"""
print(message)
try:
pool.close()
except:
pass
sys.exit(0)
# --------------
# Master branch
# --------------
if __name__ == "__main__":
# --------------
# Setup
# --------------
rp = run_params
rp['sys.argv'] = sys.argv
try:
rp['sps_libraries'] = sps.ssp.libraries
except(AttributeError):
rp['sps_libraries'] = None
# Use the globals
model = global_model
obsdat = global_obs
postkwargs = {}
# make zeros into tiny numbers
initial_theta = model.rectify_theta(model.initial_theta)
if rp.get('debug', False):
halt('stopping for debug')
# Try to set up an HDF5 file and write basic info to it
outroot = "{}_restart_{}".format(rp['outfile'], int(time.time()))
odir = os.path.dirname(os.path.abspath(outroot))
if (not os.path.exists(odir)):
halt('Target output directory {} does not exist, please make it.'.format(odir))
try:
import h5py
hfilename = outroot + '_mcmc.h5'
hfile = h5py.File(hfilename, "a")
print("Writing to file {}".format(hfilename))
write_results.write_h5_header(hfile, run_params, model)
write_results.write_obs_to_h5(hfile, obsdat)
except(ImportError):
hfile = None
# -----------------------------------------
# Initial guesses from end of last chain
# -----------------------------------------
initial_positions = result["chain"][:, -1, :]
guesses = None
initial_center = initial_positions.mean(axis=0)
# ---------------------
# Sampling
# -----------------------
if rp['verbose']:
print('emcee sampling...')
tstart = time.time()
out = fitting.restart_emcee_sampler(lnprobfn, initial_positions,
postkwargs=postkwargs,
pool=pool, hdf5=hfile, **rp)
esampler = out
edur = time.time() - tstart
if rp['verbose']:
print('done emcee in {0}s'.format(edur))
# -------------------------
# Output HDF5 (and pickles if asked for)
# -------------------------
print("Writing to {}".format(outroot))
if rp.get("output_pickles", False):
write_results.write_pickles(rp, model, obsdat, esampler, guesses,
outroot=outroot, toptimize=0, tsample=edur,
sampling_initial_center=initial_center)
if hfile is None:
hfile = hfilename
write_results.write_hdf5(hfile, rp, model, obsdat, esampler, guesses,
toptimize=0, tsample=edur,
sampling_initial_center=initial_center)
try:
hfile.close()
except:
pass
halt('Finished')
| mit | f660792cde39df74e40bd32d65fd674a | 30.607143 | 87 | 0.582345 | 3.610403 | false | false | false | false |
bd-j/prospector | prospect/utils/prospect_args.py | 1 | 7518 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""prospect_args.py - methods to get a default argument parser for prospector.
"""
import argparse
__all__ = ["get_parser", "show_default_args"]
def show_default_args():
parser = get_parser()
parser.print_help()
def get_parser(fitters=("optimize", "emcee", "dynesty")):
    """Get a default prospector argument parser.

    :param fitters: (optional, default: ("optimize", "emcee", "dynesty"))
        Iterable of fitting-backend names.  An argument group is added to
        the parser for each recognized name among "optimize", "emcee",
        and "dynesty".

    :returns parser:
        An ``argparse.ArgumentParser`` instance carrying the basic I/O and
        SPS arguments plus the arguments for each requested fitter.
    """
    # A tuple default (rather than a list) avoids the shared
    # mutable-default-argument pitfall; membership tests are unchanged.
    parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    # --- Basic ---
    parser.add_argument("--verbose", type=int, default=1,
                        help="Whether to print lots of stuff")
    parser.add_argument("--debug", dest="debug", action="store_true",
                        help=("If set, halt execution just before optimization and sampling, "
                              "but after the obs, model, and sps objects have been built."))
    parser.set_defaults(debug=False)
    parser.add_argument("--outfile", type=str, default="prospector_test_run",
                        help="Root name (including path) of the output file(s).")
    parser.add_argument("--output_pickle", action="store_true",
                        help="If set, output pickles in addition to HDF5.")

    # --- SPS parameters ---
    parser.add_argument("--zcontinuous", type=int, default=1,
                        help=("The type of metallicity parameterization to use. "
                              "See python-FSPS documentation for details."))

    if "optimize" in fitters:
        parser = add_optimize_args(parser)
    if "emcee" in fitters:
        parser = add_emcee_args(parser)
    if "dynesty" in fitters:
        parser = add_dynesty_args(parser)

    return parser
def add_optimize_args(parser):
    """Add arguments controlling the (optional) optimization stage.

    :param parser: an ``argparse.ArgumentParser`` instance.
    :returns parser: the same parser, with optimization arguments added.
    """
    import json

    # --- Optimization ---
    parser.add_argument("--optimize", action="store_true",
                        help="If set, do an optimization before sampling.")
    parser.add_argument("--min_method", type=str, default="lm",
                        help=("The scipy.optimize method to use for minimization."
                              "One of 'lm' (Levenberg-Marquardt) or 'powell' (powell line-search.)"))
    # Fix: `type=dict` is unusable from the command line -- argparse calls
    # the type on the raw string and dict("...") raises.  Parse a JSON
    # mapping instead; the default remains an empty dict.
    parser.add_argument("--min_opts", type=json.loads, default={},
                        help="Minimization parameters. See scipy.optimize.")
    parser.add_argument("--nmin", type=int, default=1,
                        help=("Number of draws from the prior from which to start minimization."
                              "nmin > 1 can be useful to avoid local minima"))
    return parser
def add_emcee_args(parser):
    """Add arguments controlling ensemble MCMC sampling with emcee.

    :param parser: an ``argparse.ArgumentParser`` instance.
    :returns parser: the same parser, with emcee arguments added.
    """
    # Bind the method once; every spec below is a plain call.
    add = parser.add_argument

    # --- emcee fitting ----
    add("--emcee", action="store_true",
        help="If set, do ensemble MCMC sampling with emcee.")
    add("--nwalkers", type=int, default=64,
        help="Number of `emcee` walkers.")
    add("--niter", type=int, default=512,
        help="Number of iterations in the `emcee` production run")
    add("--nburn", type=int, nargs="*", default=[16, 32, 64],
        help=("Specify the rounds of burn-in for `emcee` by giving the "
              "number of iterations in each round as a list. "
              "After each round the walkers are reinitialized based "
              "on the locations of the best half of the walkers."))
    add("--save_interval", dest="interval", type=float, default=0.2,
        help=("Number between 0 and 1 giving the fraction of the `emcee` "
              "production run at which to write the current chains to "
              "disk. Useful in case an expensive `emcee` run dies."))
    add("--restart_from", type=str, default="",
        help=("If given, the name of a file that contains a previous "
              "`emcee` run from which to try and restart emcee sampling."
              "In this case niter gives the number of additional iterations"
              " to run for; all other options are ignored "
              "(they are taken from the previous run.)"))
    add("--ensemble_dispersion", dest="initial_disp", type=float, default=0.1,
        help=("Initial dispersion in parameter value for the `emcee` walkers."
              " This can be overriden for individual parameters by adding an 'init_disp' "
              "key to the parameter specification dictionary for that parameter."))
    return parser
def add_dynesty_args(parser):
    """Add arguments controlling nested sampling with dynesty.

    :param parser: an ``argparse.ArgumentParser`` instance.
    :returns parser: the same parser, with dynesty arguments added.
    """
    # Bind the method once; every spec below is a plain call.
    add = parser.add_argument

    # --- dynesty parameters ---
    add("--dynesty", action="store_true",
        help="If set, do nested sampling with dynesty.")
    add("--nested_bound", type=str, default="multi",
        choices=["single", "multi", "balls", "cubes"],
        help=("Method for bounding the prior volume when drawing new points. "
              "One of single | multi | balls | cubes"))
    add("--nested_sample", "--nested_method", type=str, dest="nested_sample",
        default="slice", choices=["unif", "rwalk", "slice"],
        help=("Method for drawing new points during sampling. "
              "One of unif | rwalk | slice"))
    add("--nested_walks", type=int, default=48,
        help=("Number of Metropolis steps to take when "
              "`nested_sample` is 'rwalk'"))
    add("--nlive_init", dest="nested_nlive_init", type=int, default=100,
        help="Number of live points for the intial nested sampling run.")
    add("--nlive_batch", dest="nested_nlive_batch", type=int, default=100,
        help="Number of live points for the dynamic nested sampling batches")
    add("--nested_dlogz_init", type=float, default=0.05,
        help=("Stop the initial run when the remaining evidence is estimated "
              "to be less than this."))
    add("--nested_maxcall", type=int, default=int(5e7),
        help=("Maximum number of likelihood calls during nested sampling. "
              "This will only be enforced after the initial pass"))
    add("--nested_maxiter", type=int, default=int(1e6),
        help=("Maximum number of iterations during nested sampling. "
              "This will only be enforced after the initial pass"))
    add("--nested_maxbatch", type=int, default=10,
        help="Maximum number of dynamic batches.")
    add("--nested_bootstrap", type=int, default=0,
        help=("Number of bootstrap resamplings to use when estimating "
              "ellipsoid expansion factor."))
    add("--nested_target_n_effective", type=int, default=10000,
        help=("Stop when the number of *effective* posterior samples as estimated "
              "by dynesty reaches the target number."))
    return parser
def add_data_args(parser):
    """Placeholder for arguments controlling data manipulation
    (e.g. logify_spectrum, normalize_spectrum).  Currently adds nothing.

    :param parser: an ``argparse.ArgumentParser`` instance.
    :returns parser: the same parser, unchanged.
    """
    return parser
| mit | 8addbfef31fac78d5047230354a73eda | 44.017964 | 106 | 0.568901 | 4.48568 | false | false | false | false |
bd-j/prospector | prospect/likelihood/likelihood.py | 1 | 7851 | import time, sys, os
import numpy as np
from scipy.linalg import LinAlgError
__all__ = ["lnlike_spec", "lnlike_phot", "chi_spec", "chi_phot", "write_log"]
def lnlike_spec(spec_mu, obs=None, spec_noise=None, f_outlier_spec=0.0, **vectors):
    """Compute the ln-likelihood of the spectroscopic data given the model
    spectrum, optionally using a NoiseModel (e.g. a gaussian process for
    correlated residuals) and/or a two-component outlier mixture model.

    :param spec_mu:
        The mean model spectrum, including e.g. calibration and sky
        emission, in the same (linear or logarithmic) units as
        ``obs["spectrum"]``.

    :param obs: (optional)
        Observational data dictionary with keys ``"spectrum"``, ``"unc"``,
        and ``"wavelength"``, plus an optional boolean ``"mask"`` of the
        same length.  If ``obs["spectrum"]`` is None, 0.0 is returned.

    :param spec_noise: (optional)
        A NoiseModel object with ``compute`` and ``lnlikelihood`` methods.
        If supplied, ``obs["wavelength"]`` is passed to it for kernel
        construction.

    :param f_outlier_spec: (optional)
        Fraction of spectral pixels considered outliers by the mixture
        model; 0.0 disables the mixture.

    :param vectors: (optional)
        Extra vectors (same length as the wavelength array) forwarded to
        the noise model as possible kernel weighting functions.  When the
        mixture model is active this must include "nsigma_outlier_spec".

    :returns lnlikelihood:
        The natural log of the likelihood of the data given the model.
    """
    if obs['spectrum'] is None:
        return 0.0

    mask = obs.get('mask', slice(None))
    vectors['mask'] = mask
    vectors['wavelength'] = obs['wavelength']

    residual = (obs['spectrum'] - spec_mu)[mask]
    var = (obs['unc'][mask])**2

    if spec_noise is not None:
        try:
            spec_noise.compute(**vectors)
            if f_outlier_spec == 0.0:
                return spec_noise.lnlikelihood(spec_mu[mask], obs['spectrum'][mask])
            # The outlier mixture cannot be combined with a correlated
            # noise model: require a diagonal covariance and use it as the
            # per-pixel variance below.
            assert spec_noise.Sigma.ndim == 1
            var = spec_noise.Sigma
        except LinAlgError:
            return np.nan_to_num(-np.inf)

    # Independent-pixel Gaussian ln-likelihood.
    lnp = -0.5*( (residual**2/var) + np.log(2*np.pi*var) )
    if f_outlier_spec == 0.0:
        return lnp.sum()

    # Mixture model: each pixel is "good" (variance var) or "bad"
    # (variance inflated by nsigma_outlier_spec**2), with prior weight
    # f_outlier_spec on the bad component.
    var_bad = var * (vectors["nsigma_outlier_spec"]**2)
    lnp_bad = -0.5*( (residual**2/var_bad) + np.log(2*np.pi*var_bad) )
    lnp_tot = np.logaddexp(lnp + np.log(1-f_outlier_spec), lnp_bad + np.log(f_outlier_spec))
    return lnp_tot.sum()
def lnlike_phot(phot_mu, obs=None, phot_noise=None, f_outlier_phot=0.0, **vectors):
    """Compute the ln-likelihood of the photometric data given the model
    SED, optionally using a NoiseModel for correlated band uncertainties
    and/or a two-component outlier mixture model.

    :param phot_mu:
        The mean model SED, in linear flux units (i.e. maggies).

    :param obs: (optional)
        Observational data dictionary with keys ``"maggies"`` and
        ``"maggies_unc"``, plus optional ``"phot_mask"`` (boolean, same
        length) and ``"filters"`` (needed by noise models that group
        bands).  If ``obs["maggies"]`` is None, 0.0 is returned.

    :param phot_noise: (optional)
        A ``prospect.likelihood.NoiseModel`` object with ``compute()`` and
        ``lnlikelihood()`` methods.  If not supplied a simple chi^2
        likelihood is evaluated.

    :param f_outlier_phot: (optional)
        Fraction of photometric bands considered outliers by the mixture
        model; 0.0 disables the mixture.

    :param vectors:
        Extra vectors (same length as maggies) forwarded to the noise
        model for weighted covariance construction.  When the mixture
        model is active this must include "nsigma_outlier_phot".

    :returns lnlikelihood:
        The natural log of the likelihood of the data given the model.
    """
    if obs['maggies'] is None:
        return 0.0

    mask = obs.get('phot_mask', slice(None))
    residual = (obs['maggies'] - phot_mu)[mask]
    var = (obs['maggies_unc'][mask])**2

    if phot_noise is not None:
        # The noise model wants filter names; obs['filters'] may be a
        # FilterSet-like object or a plain list of Filter objects.
        try:
            filternames = obs['filters'].filternames
        except AttributeError:
            filternames = [f.name for f in obs['filters']]
        vectors['mask'] = mask
        vectors['filternames'] = np.array(filternames)
        vectors['phot_samples'] = obs.get('phot_samples', None)
        try:
            phot_noise.compute(**vectors)
            if f_outlier_phot == 0.0:
                return phot_noise.lnlikelihood(phot_mu[mask], obs['maggies'][mask])
            # The outlier mixture cannot be combined with a correlated
            # noise model: require a diagonal covariance and use it as the
            # per-band variance below.
            assert phot_noise.Sigma.ndim == 1
            var = phot_noise.Sigma
        except LinAlgError:
            return np.nan_to_num(-np.inf)

    # Independent-band Gaussian ln-likelihood.
    lnp = -0.5*( (residual**2/var) + np.log(2*np.pi*var) )
    if f_outlier_phot == 0.0:
        return lnp.sum()

    # Mixture model: each band is "good" (variance var) or "bad"
    # (variance inflated by nsigma_outlier_phot**2), with prior weight
    # f_outlier_phot on the bad component.
    var_bad = var * (vectors["nsigma_outlier_phot"]**2)
    lnp_bad = -0.5*( (residual**2/var_bad) + np.log(2*np.pi*var_bad) )
    lnp_tot = np.logaddexp(lnp + np.log(1-f_outlier_phot), lnp_bad + np.log(f_outlier_phot))
    return lnp_tot.sum()
def chi_phot(phot_mu, obs, **extras):
    """Return the vector of noise-weighted photometric residuals (chi
    values), for use in non-linear least-squares algorithms.

    :param phot_mu:
        Model photometry, same units as the photometry in ``obs``.

    :param obs:
        Observational data dictionary with keys ``"maggies"`` and
        ``"maggies_unc"``, plus an optional boolean ``"phot_mask"``.  If
        ``"maggies"`` is None an empty array is returned.

    :returns chi:
        Array of (data - model) / unc for the unmasked photometric points.
    """
    if obs['maggies'] is None:
        return np.array([])
    mask = obs.get('phot_mask', slice(None))
    return (obs['maggies'] - phot_mu)[mask] / obs['maggies_unc'][mask]
def chi_spec(spec_mu, obs, **extras):
    """Return the vector of noise-weighted spectroscopic residuals (chi
    values), for use in non-linear least-squares algorithms.

    :param spec_mu:
        Model spectrum, same units as the spectrum in ``obs``.

    :param obs:
        Observational data dictionary with keys ``"spectrum"`` and
        ``"unc"``, plus an optional boolean ``"mask"`` used to index the
        residual vector.  If ``"spectrum"`` is None an empty array is
        returned.

    :returns chi:
        Array of (data - model) / unc for the unmasked spectroscopic
        points.
    """
    if obs['spectrum'] is None:
        return np.array([])
    mask = obs.get('mask', slice(None))
    return (obs['spectrum'] - spec_mu)[mask] / obs['unc'][mask]
def write_log(theta, lnp_prior, lnp_spec, lnp_phot, d1, d2):
    """Print documentary info for debugging: the parameter vector, the
    model/likelihood timings, and the ln-probability breakdown."""
    print(theta)
    print('model calc = {}s, lnlike calc = {}'.format(d1, d2))
    # Keep the original summation order (spec + phot + prior) so the
    # printed float is bit-identical.
    total = lnp_spec + lnp_phot + lnp_prior
    print('lnp = {}, lnp_spec = {}, lnp_phot = {}'.format(total, lnp_spec, lnp_phot))
| mit | 75357ead569f38b38a7f50f494c65dae | 35.179724 | 96 | 0.617374 | 3.596427 | false | false | false | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.