[ "# Copyright (c) 2013, VHRS and contributors # For license", "if cg.education_check3 == 1: emp = frappe.get_doc(\"Education Check3\", { \"applicant_id\":", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check2\", {", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check5\",", "Status\") + \":Data:150\", _(\"Family Check Status\") + \":Data:150\", _(\"Ref Check1", "\":Data:150\", _(\"Ref Check3 Status\") + \":Data:150\", _(\"Ref Check4 Status\") +", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check4\", { \"applicant_id\":", "\":Data:150\", _(\"Criminal Check2 Status\") + \":Data:150\", _(\"ID Check1 Status\") +", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.employment_check2", "frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "== 1: emp = frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id}) if", "[\"-\"] if cg.education_check1 == 1: if frappe.db.exists(\"Education Check1\", { \"applicant_id\":", "[\"-\"] if cg.education_check2 == 1: emp = frappe.get_doc(\"Education Check2\", {", "frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "+= [\"-\"] if cg.employment_check2 == 1: emp = frappe.get_doc(\"Employment Check2\",", "row += [\"-\"] if cg.address_check2 == 1: emp = frappe.get_doc(\"Address", "else: vemp = frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\": app.ref_id}) row", "1: if frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education", "else: vemp = frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\": app.ref_id}) row", "cg.family_check1 == 1: emp = frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id})", "+= [vemp.status] else: row += [\"-\"] if cg.education_check2 == 1:", "Check5 Status\") + \":Data:150\", _(\"ID Check6 Status\") + \":Data:150\", ]", "[emp.status] else: vemp = frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\": app.ref_id})", "msgprint from frappe.utils import (cint, cstr, date_diff, flt, getdate, money_in_words,", "= frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "+ \":Data:150\", _(\"ID Check4 Status\") + \":Data:150\", _(\"ID Check5 Status\")", "columns = get_columns() data = [] row = [] filters", "if cg.address_check1 == 1: emp = frappe.get_doc(\"Address Check1\", { \"applicant_id\":", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Criminal", "import unicode_literals import frappe from frappe import _, msgprint from", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\":", "== 1: emp = frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id}) if", "[vemp.status] else: row += [\"-\"] if cg.civil_check == 1: emp", "\":Data:150\", _(\"Checks Group Name\") + \":Data:150\", _(\"Emp Check1 Status\") +", "1: emp = frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id}) if emp.status", "Check5\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "row += [vemp.status] else: row += [\"-\"] if cg.criminal_check ==", "[\"-\"] if cg.id_check1 == 1: emp = frappe.get_doc(\"ID Check1\", {", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check2\",", "+= [vemp.status] 
else: row += [\"-\"] if cg.address_check3 == 1:", "= frappe.get_doc(\"Verify Reference Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "row += [\"-\"] if cg.id_check4 == 1: emp = frappe.get_doc(\"ID", "[vemp.status] else: row += [\"-\"] if cg.address_check2 == 1: emp", "Address Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "_(\"VHRS Ref. No\") + \":Data:150\", _(\"Candidate Name\") + \":Data:180\", _(\"Start", "vemp = frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\": app.ref_id}) row +=", "see license.txt from __future__ import unicode_literals import frappe from frappe", "frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "frappe.get_doc(\"Verify Reference Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "No\") + \":Data:150\", _(\"Candidate Name\") + \":Data:180\", _(\"Start Date\") +", "+ \":Data:150\", _(\"Family Check Status\") + \":Data:150\", _(\"Ref Check1 Status\")", "emp = frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "\":Data:150\", ] return columns def applicants(filters): applicant = frappe.db.sql( \"\"\"select", "frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] data.append(row) return", "Name\") + \":Link/Customer:200\", _(\"VHRS Ref. No\") + \":Data:150\", _(\"Candidate Name\")", "license.txt from __future__ import unicode_literals import frappe from frappe import", "Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "vemp = frappe.get_doc(\"Verify Reference Check4\", { \"applicant_id\": app.ref_id}) row +=", "else: row += [\"-\"] if cg.education_check2 == 1: emp =", "else: vemp = frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\": app.ref_id}) row", "frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "= frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+= [\"-\"] if cg.address_check1 == 1: emp = frappe.get_doc(\"Address Check1\",", "row += [vemp.status] else: row += [\"-\"] if cg.address_check4 ==", "frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "[vemp.status] else: row += [\"-\"] if cg.id_check6 == 1: emp", "\":Data:150\", _(\"Ref Check2 Status\") + \":Data:150\", _(\"Ref Check3 Status\") +", "frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "cg.criminal_check == 1: emp = frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id})", "_(\"Edu Check1 Status\") + \":Data:150\", _(\"Edu Check2 Status\") + \":Data:150\",", "frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education Check1\", {", "_(\"Checks Group Name\") + \":Data:150\", _(\"Emp Check1 Status\") + \":Data:150\",", "= frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "row += [\"-\"] data.append(row) return columns, data def get_columns(): columns", "_, msgprint from frappe.utils import (cint, cstr, date_diff, flt, getdate,", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID", "row += [vemp.status] else: row += [\"-\"] if 
cg.education_check1 ==", "app.candidate_name, app.in_date, app.status, app.checks_group] if app.status != \"Entry Pending\": cg", "frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "= frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "[vemp.status] else: row += [\"-\"] if cg.id_check3 == 1: emp", "= frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "Check5\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "and contributors # For license information, please see license.txt from", "cg.education_check4 == 1: emp = frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id})", "data = [] row = [] filters applicant = applicants(filters)", "Status\") + \":Data:150\", _(\"Ref Check4 Status\") + \":Data:150\", _(\"Civil Check1", "vemp = frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\": app.ref_id}) row +=", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\":", "Check1 Status\") + \":Data:150\", _(\"Ref Check2 Status\") + \":Data:150\", _(\"Ref", "[\"-\"] if cg.reference_check1 == 1: emp = frappe.get_doc(\"Reference Check1\", {", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.address_check2", "= frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "# For license information, please see license.txt from __future__ import", "ID Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "cg.reference_check1 == 1: emp = frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id})", "vemp = frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\": app.ref_id}) row +=", "else: row += [\"-\"] if cg.address_check2 == 1: emp =", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\":", "else: row += [\"-\"] if cg.family_check1 == 1: emp =", "Reference Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "emp = frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id}) if emp.status !=", "frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1 == 1: emp = frappe.get_doc(\"Employment", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Civil", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check6\",", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check3", "_(\"ID Check3 Status\") + \":Data:150\", _(\"ID Check4 Status\") + \":Data:150\",", "\":Data:150\", _(\"Add Check3 Status\") + \":Data:150\", _(\"Add Check4 Status\") +", "_(\"Criminal Check2 Status\") + \":Data:150\", _(\"ID Check1 Status\") + \":Data:150\",", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\":", "+ \":Data:150\", _(\"Add Check3 Status\") + \":Data:150\", _(\"Add Check4 Status\")", "[\"-\"] if cg.id_check3 == 1: emp = frappe.get_doc(\"ID Check3\", {", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check3\",", "else: vemp = frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\": app.ref_id}) row", "1: emp = frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "= frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "[\"-\"] if cg.family_check1 == 1: emp = frappe.get_doc(\"Family Check1\", {", 
"`tabApplicant` app where app.in_date between %(start_date)s and %(end_date)s order by", "order by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"), \"end_date\": filters.get(\"to_date\") }, as_dict=1)", "else: row += [\"-\"] if cg.education_check1 == 1: if frappe.db.exists(\"Education", "else: vemp = frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\": app.ref_id}) row", "cg.id_check5 == 1: emp = frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id})", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education", "monthrange def execute(filters=None): columns = get_columns() data = [] row", "= frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "cg.education_check3 == 1: emp = frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id})", "emp = frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id}) if emp.status !=", "Name\") + \":Data:150\", _(\"Emp Check1 Status\") + \":Data:150\", _(\"Emp Check2", "row += [\"-\"] if cg.address_check3 == 1: emp = frappe.get_doc(\"Address", "row += [\"-\"] if cg.reference_check2 == 1: emp = frappe.get_doc(\"Reference", "emp = frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id}) if emp.status !=", "if cg.id_check2 == 1: emp = frappe.get_doc(\"ID Check2\", { \"applicant_id\":", "unicode_literals import frappe from frappe import _, msgprint from frappe.utils", "+= [\"-\"] if cg.education_check3 == 1: emp = frappe.get_doc(\"Education Check3\",", "== 1: emp = frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id}) if", "+ \":Data:150\", _(\"Edu Check3 Status\") + \":Data:150\", _(\"Edu Check4 Status\")", "+ \":Data:150\", _(\"ID Check1 Status\") + \":Data:150\", _(\"ID Check2 Status\")", "== 1: emp = frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id}) if", "\":Data:150\", _(\"ID Check4 Status\") + \":Data:150\", _(\"ID Check5 Status\") +", "if cg.education_check4 == 1: emp = frappe.get_doc(\"Education Check4\", { \"applicant_id\":", "emp = frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id}) if emp.status !=", "cg.reference_check4 == 1: emp = frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id})", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Family", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check3\", {", "vemp = frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\": app.ref_id}) row +=", "else: vemp = frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\": app.ref_id}) row", "else: row += [\"-\"] if cg.id_check6 == 1: emp =", "Check6 Status\") + \":Data:150\", ] return columns def applicants(filters): applicant", "[emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\": app.ref_id})", "[emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\": app.ref_id})", "+ \":Data:150\", _(\"Civil Check1 Status\") + \":Data:150\", _(\"Criminal Check2 Status\")", "if cg.reference_check2 == 1: emp = frappe.get_doc(\"Reference Check2\", { \"applicant_id\":", "cg.id_check3 == 1: emp = frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id})", "+ \":Data:180\", _(\"Start Date\") + \":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks", "[\"-\"] if cg.employment_check2 == 1: emp = frappe.get_doc(\"Employment Check2\", {", "return columns def applicants(filters): applicant = frappe.db.sql( \"\"\"select 
app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from", "\":Data:150\", _(\"ID Check2 Status\") + \":Data:150\", _(\"ID Check3 Status\") +", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check1\",", "frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "information, please see license.txt from __future__ import unicode_literals import frappe", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.employment_check3", "if frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education Check1\",", "= frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "_(\"Edu Check4 Status\") + \":Data:150\", _(\"Add Check1 Status\") + \":Data:150\",", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\":", "applicants(filters) for app in applicant: row = [app.customer, app.ref_id, app.candidate_name,", "= frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1 == 1: emp =", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check2", "[emp.status] else: vemp = frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\": app.ref_id})", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Civil Check\", {", "frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "+ \":Data:150\", _(\"Emp Check4 Status\") + \":Data:150\", _(\"Edu Check1 Status\")", "frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "+ \":Data:150\", _(\"ID Check6 Status\") + \":Data:150\", ] return columns", "import date import datetime from calendar import monthrange def execute(filters=None):", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.address_check4", "\"applicant_id\": app.ref_id}) row += [vemp.status] else: row += [\"-\"] data.append(row)", "Ref. 
No\") + \":Data:150\", _(\"Candidate Name\") + \":Data:180\", _(\"Start Date\")", "cg.id_check2 == 1: emp = frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id})", "else: vemp = frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\": app.ref_id}) row", "[\"-\"] if cg.id_check2 == 1: emp = frappe.get_doc(\"ID Check2\", {", "cg.employment_check4 == 1: emp = frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id})", "1: emp = frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id}) if emp.status", "_(\"Ref Check2 Status\") + \":Data:150\", _(\"Ref Check3 Status\") + \":Data:150\",", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.reference_check1", "\"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row += [emp.status]", "row += [\"-\"] if cg.criminal_check == 1: emp = frappe.get_doc(\"Criminal", "+= [vemp.status] else: row += [\"-\"] if cg.family_check1 == 1:", "+ \":Data:150\", _(\"Edu Check4 Status\") + \":Data:150\", _(\"Add Check1 Status\")", "emp = frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id}) if emp.status !=", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.family_check1", "frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "else: vemp = frappe.get_doc(\"Verify Reference Check4\", { \"applicant_id\": app.ref_id}) row", "_(\"Emp Check2 Status\") + \":Data:150\", _(\"Emp Check3 Status\") + \":Data:150\",", "app where app.in_date between %(start_date)s and %(end_date)s order by app.in_date\"\"\",", "[\"-\"] data.append(row) return columns, data def get_columns(): columns = [", "else: vemp = frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\": app.ref_id}) row", "vemp = frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\": app.ref_id}) row +=", "!= \"Entry Pending\": cg = frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1", "frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "if cg.employment_check4 == 1: emp = frappe.get_doc(\"Employment Check4\", { \"applicant_id\":", "vemp = frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\": app.ref_id}) row +=", "app.checks_group) if cg.employment_check1 == 1: emp = frappe.get_doc(\"Employment Check1\", {", "emp = frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id}) if emp.status !=", "license information, please see license.txt from __future__ import unicode_literals import", "row += [\"-\"] if cg.education_check2 == 1: emp = frappe.get_doc(\"Education", "row += [vemp.status] else: row += [\"-\"] if cg.id_check6 ==", "between %(start_date)s and %(end_date)s order by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"),", "= frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\":", "frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "else: row += [\"-\"] if cg.address_check4 == 1: emp =", "getdate, money_in_words, nowdate, rounded, today) from datetime import datetime from", "date import datetime from calendar import monthrange def execute(filters=None): columns", "if cg.reference_check4 == 1: emp = frappe.get_doc(\"Reference Check4\", { \"applicant_id\":", 
"emp = frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id}) if emp.status !=", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check1\", {", "_(\"Add Check3 Status\") + \":Data:150\", _(\"Add Check4 Status\") + \":Data:150\",", "emp = frappe.get_doc(\"Civil Check\", { \"applicant_id\": app.ref_id}) if emp.status !=", "cg.education_check2 == 1: emp = frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id})", "if cg.reference_check3 == 1: emp = frappe.get_doc(\"Reference Check3\", { \"applicant_id\":", "else: vemp = frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\": app.ref_id}) row", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check3\",", "= frappe.get_doc(\"Civil Check\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "applicant = applicants(filters) for app in applicant: row = [app.customer,", "app.in_date between %(start_date)s and %(end_date)s order by app.in_date\"\"\", { \"start_date\":", "else: row += [\"-\"] if cg.employment_check3 == 1: emp =", "+= [vemp.status] else: row += [\"-\"] if cg.id_check4 == 1:", "] return columns def applicants(filters): applicant = frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status", "+= [\"-\"] if cg.criminal_check == 1: emp = frappe.get_doc(\"Criminal Check\",", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check4\", {", "\":Data:150\", _(\"Edu Check4 Status\") + \":Data:150\", _(\"Add Check1 Status\") +", "Check1 Status\") + \":Data:150\", _(\"Criminal Check2 Status\") + \":Data:150\", _(\"ID", "import monthrange def execute(filters=None): columns = get_columns() data = []", "Check2 Status\") + \":Data:150\", _(\"Emp Check3 Status\") + \":Data:150\", _(\"Emp", "+ \":Data:150\", _(\"Emp Check1 Status\") + \":Data:150\", _(\"Emp Check2 Status\")", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Criminal Check\",", "Status\") + \":Data:150\", _(\"ID Check1 Status\") + \":Data:150\", _(\"ID Check2", "1: emp = frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id}) if emp.status", "else: row += [\"-\"] if cg.education_check4 == 1: emp =", "else: vemp = frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\": app.ref_id}) row", "[\"-\"] if cg.reference_check4 == 1: emp = frappe.get_doc(\"Reference Check4\", {", "Status\") + \":Data:150\", _(\"Emp Check2 Status\") + \":Data:150\", _(\"Emp Check3", "else: vemp = frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\": app.ref_id}) row", "\":Data:150\", _(\"Candidate Name\") + \":Data:180\", _(\"Start Date\") + \":Date:150\", _(\"Status\")", "cg.employment_check1 == 1: emp = frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id})", "\"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where app.in_date between %(start_date)s and", "Employment Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check3\", {", "\":Data:150\", _(\"Edu Check3 Status\") + \":Data:150\", _(\"Edu Check4 Status\") +", "{ \"applicant_id\": app.ref_id}) row += [vemp.status] else: row += [\"-\"]", "date_diff, flt, getdate, money_in_words, nowdate, rounded, today) from datetime import", "def execute(filters=None): columns = get_columns() data = [] row =", "= frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\": 
app.ref_id}) row += [vemp.status]", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check5\", {", "[vemp.status] else: row += [\"-\"] if cg.employment_check3 == 1: emp", "row += [\"-\"] if cg.education_check1 == 1: if frappe.db.exists(\"Education Check1\",", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.education_check3", "frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "= [ _(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS Ref. No\") +", "+ \":Data:150\", _(\"Ref Check3 Status\") + \":Data:150\", _(\"Ref Check4 Status\")", "For license information, please see license.txt from __future__ import unicode_literals", "cg.id_check1 == 1: emp = frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id})", "import (cint, cstr, date_diff, flt, getdate, money_in_words, nowdate, rounded, today)", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.address_check1", "(c) 2013, VHRS and contributors # For license information, please", "row += [vemp.status] else: row += [\"-\"] if cg.id_check2 ==", "from datetime import date import datetime from calendar import monthrange", "if cg.address_check3 == 1: emp = frappe.get_doc(\"Address Check3\", { \"applicant_id\":", "Address Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "vemp = frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\": app.ref_id}) row +=", "[vemp.status] else: row += [\"-\"] if cg.employment_check2 == 1: emp", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\":", "cg.address_check1 == 1: emp = frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id})", "else: row += [\"-\"] if cg.employment_check2 == 1: emp =", "columns def applicants(filters): applicant = frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant`", "Status\") + \":Data:150\", ] return columns def applicants(filters): applicant =", "frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "row += [\"-\"] if cg.reference_check3 == 1: emp = frappe.get_doc(\"Reference", "[\"-\"] if cg.id_check4 == 1: emp = frappe.get_doc(\"ID Check4\", {", "row += [vemp.status] else: row += [\"-\"] data.append(row) return columns,", "else: row += [\"-\"] if cg.id_check3 == 1: emp =", "[vemp.status] else: row += [\"-\"] if cg.reference_check2 == 1: emp", "frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "rounded, today) from datetime import datetime from datetime import date", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check2\", {", "= applicants(filters) for app in applicant: row = [app.customer, app.ref_id,", "Check6\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "if cg.id_check3 == 1: emp = frappe.get_doc(\"ID Check3\", { \"applicant_id\":", "== 1: emp = frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id}) if", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check1\", {", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.civil_check", "row += [\"-\"] if cg.employment_check3 == 1: emp = frappe.get_doc(\"Employment", "+ \":Data:150\", _(\"Add Check1 Status\") + \":Data:150\", _(\"Add Check2 Status\")", "Completed\": row += 
[emp.status] else: vemp = frappe.get_doc(\"Verify Address Check4\",", "+= [vemp.status] else: row += [\"-\"] if cg.id_check6 == 1:", "Reference Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "contributors # For license information, please see license.txt from __future__", "row += [\"-\"] if cg.reference_check1 == 1: emp = frappe.get_doc(\"Reference", "else: row += [\"-\"] if cg.reference_check4 == 1: emp =", "== 1: emp = frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id}) if", "== 1: emp = frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id}) if", "frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "Education Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "1: emp = frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id}) if emp.status", "frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Family Check1\",", "Status\") + \":Data:150\", _(\"ID Check5 Status\") + \":Data:150\", _(\"ID Check6", "[emp.status] else: vemp = frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\": app.ref_id})", "cg.address_check3 == 1: emp = frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id})", "\":Link/Customer:200\", _(\"VHRS Ref. No\") + \":Data:150\", _(\"Candidate Name\") + \":Data:180\",", "Check3 Status\") + \":Data:150\", _(\"Edu Check4 Status\") + \":Data:150\", _(\"Add", "row += [vemp.status] else: row += [\"-\"] if cg.family_check1 ==", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\": app.ref_id})", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\":", "= frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+= [vemp.status] else: row += [\"-\"] if cg.id_check2 == 1:", "+= [vemp.status] else: row += [\"-\"] if cg.id_check5 == 1:", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check3\",", "= frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "= frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "\":Data:150\", _(\"ID Check6 Status\") + \":Data:150\", ] return columns def", "data.append(row) return columns, data def get_columns(): columns = [ _(\"Project", "execute(filters=None): columns = get_columns() data = [] row = []", "vemp = frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\": app.ref_id}) row +=", "[emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\": app.ref_id})", "== 1: emp = frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id}) if", "= get_columns() data = [] row = [] filters applicant", "+= [\"-\"] if cg.employment_check4 == 1: emp = frappe.get_doc(\"Employment Check4\",", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\":", "\":Data:150\", _(\"Emp Check3 Status\") + \":Data:150\", _(\"Emp Check4 Status\") +", "Status\") + \":Data:150\", _(\"ID Check6 Status\") + \":Data:150\", ] return", "\":Data:150\", _(\"Emp Check4 Status\") + \":Data:150\", _(\"Edu Check1 Status\") +", "row += [vemp.status] else: row += [\"-\"] if cg.address_check3 ==", "+= [\"-\"] if 
cg.education_check2 == 1: emp = frappe.get_doc(\"Education Check2\",", "+ \":Data:150\", _(\"Candidate Name\") + \":Data:180\", _(\"Start Date\") + \":Date:150\",", "+= [vemp.status] else: row += [\"-\"] if cg.reference_check2 == 1:", "== 1: if frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}): emp =", "+ \":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks Group Name\") + \":Data:150\",", "vemp = frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\": app.ref_id}) row +=", "+= [\"-\"] if cg.family_check1 == 1: emp = frappe.get_doc(\"Family Check1\",", "Check3 Status\") + \":Data:150\", _(\"Emp Check4 Status\") + \":Data:150\", _(\"Edu", "Copyright (c) 2013, VHRS and contributors # For license information,", "+= [\"-\"] if cg.id_check5 == 1: emp = frappe.get_doc(\"ID Check5\",", "columns, data def get_columns(): columns = [ _(\"Project Name\") +", "+ \":Data:150\", _(\"ID Check5 Status\") + \":Data:150\", _(\"ID Check6 Status\")", "+= [vemp.status] else: row += [\"-\"] if cg.reference_check4 == 1:", "row += [\"-\"] if cg.id_check3 == 1: emp = frappe.get_doc(\"ID", "Status\") + \":Data:150\", _(\"Ref Check3 Status\") + \":Data:150\", _(\"Ref Check4", "if cg.employment_check2 == 1: emp = frappe.get_doc(\"Employment Check2\", { \"applicant_id\":", "[\"-\"] if cg.address_check1 == 1: emp = frappe.get_doc(\"Address Check1\", {", "else: vemp = frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\": app.ref_id}) row", "1: emp = frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "ID Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "row += [vemp.status] else: row += [\"-\"] if cg.id_check1 ==", "cstr, date_diff, flt, getdate, money_in_words, nowdate, rounded, today) from datetime", "[emp.status] else: vemp = frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\": app.ref_id})", "Education Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "Criminal Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "else: vemp = frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\": app.ref_id}) row", "app.in_date, app.status, app.checks_group] if app.status != \"Entry Pending\": cg =", "_(\"Family Check Status\") + \":Data:150\", _(\"Ref Check1 Status\") + \":Data:150\",", "[\"-\"] if cg.address_check4 == 1: emp = frappe.get_doc(\"Address Check4\", {", "vemp = frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\": app.ref_id}) row +=", "Check Status\") + \":Data:150\", _(\"Ref Check1 Status\") + \":Data:150\", _(\"Ref", "\":Data:150\", _(\"Ref Check4 Status\") + \":Data:150\", _(\"Civil Check1 Status\") +", "emp = frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "\":Data:150\", _(\"ID Check5 Status\") + \":Data:150\", _(\"ID Check6 Status\") +", "== 1: emp = frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id}) if", "\":Data:150\", _(\"Ref Check1 Status\") + \":Data:150\", _(\"Ref Check2 Status\") +", "+= [\"-\"] if cg.id_check3 == 1: emp = frappe.get_doc(\"ID Check3\",", "+ \":Data:150\", _(\"Emp Check3 Status\") + \":Data:150\", _(\"Emp Check4 Status\")", "+ \":Link/Customer:200\", _(\"VHRS Ref. 
No\") + \":Data:150\", _(\"Candidate Name\") +", "and %(end_date)s order by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"), \"end_date\": filters.get(\"to_date\")", "== 1: emp = frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id}) if", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\":", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check2\", {", "else: row += [\"-\"] if cg.id_check4 == 1: emp =", "= frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "= [] row = [] filters applicant = applicants(filters) for", "row = [app.customer, app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if app.status", "_(\"Add Check2 Status\") + \":Data:150\", _(\"Add Check3 Status\") + \":Data:150\",", "applicants(filters): applicant = frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where", "import datetime from datetime import date import datetime from calendar", "[emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\": app.ref_id})", "+= [vemp.status] else: row += [\"-\"] if cg.address_check1 == 1:", "frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "Status\") + \":Data:150\", _(\"Ref Check1 Status\") + \":Data:150\", _(\"Ref Check2", "frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "\":Data:150\", _(\"ID Check1 Status\") + \":Data:150\", _(\"ID Check2 Status\") +", "\"applicant_id\": app.ref_id}) row += [vemp.status] else: row += [\"-\"] if", "[vemp.status] else: row += [\"-\"] if cg.education_check4 == 1: emp", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check4\",", "row += [vemp.status] else: row += [\"-\"] if cg.education_check4 ==", "emp = frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id}) if emp.status !=", "frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "if cg.address_check2 == 1: emp = frappe.get_doc(\"Address Check2\", { \"applicant_id\":", "from frappe.utils import (cint, cstr, date_diff, flt, getdate, money_in_words, nowdate,", "else: row += [\"-\"] if cg.id_check1 == 1: emp =", "app.checks_group] if app.status != \"Entry Pending\": cg = frappe.get_doc(\"Checks Group\",", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check5", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\":", "if cg.id_check6 == 1: emp = frappe.get_doc(\"ID Check6\", { \"applicant_id\":", "vemp = frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\": app.ref_id}) row +=", "+= [\"-\"] data.append(row) return columns, data def get_columns(): columns =", "Status\") + \":Data:150\", _(\"Add Check1 Status\") + \":Data:150\", _(\"Add Check2", "vemp = frappe.get_doc(\"Verify Employment Check2\", { \"applicant_id\": app.ref_id}) row +=", "Status\") + \":Data:150\", _(\"Add Check2 Status\") + \":Data:150\", _(\"Add Check3", "calendar import monthrange def execute(filters=None): columns = get_columns() data =", "row += [vemp.status] else: row += [\"-\"] if cg.id_check3 ==", "%(start_date)s and %(end_date)s order by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"), \"end_date\":", "datetime import datetime from datetime import date import 
datetime from", "if cg.employment_check1 == 1: emp = frappe.get_doc(\"Employment Check1\", { \"applicant_id\":", "datetime import date import datetime from calendar import monthrange def", "row += [vemp.status] else: row += [\"-\"] if cg.address_check2 ==", "Status\") + \":Data:150\", _(\"Edu Check3 Status\") + \":Data:150\", _(\"Edu Check4", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.education_check4", "\":Data:180\", _(\"Start Date\") + \":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks Group", "Status\") + \":Data:150\", _(\"ID Check3 Status\") + \":Data:150\", _(\"ID Check4", "Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "Check4 Status\") + \":Data:150\", _(\"Edu Check1 Status\") + \":Data:150\", _(\"Edu", "frappe from frappe import _, msgprint from frappe.utils import (cint,", "+= [vemp.status] else: row += [\"-\"] if cg.employment_check2 == 1:", "else: vemp = frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\": app.ref_id}) row", "1: emp = frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id}) if emp.status", "_(\"Start Date\") + \":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks Group Name\")", "+ \":Data:150\", _(\"Emp Check2 Status\") + \":Data:150\", _(\"Emp Check3 Status\")", "frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "[vemp.status] else: row += [\"-\"] if cg.reference_check1 == 1: emp", "= frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "if cg.education_check2 == 1: emp = frappe.get_doc(\"Education Check2\", { \"applicant_id\":", "cg.address_check2 == 1: emp = frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id})", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\":", "else: row += [\"-\"] if cg.address_check3 == 1: emp =", "== 1: emp = frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id}) if", "_(\"Edu Check3 Status\") + \":Data:150\", _(\"Edu Check4 Status\") + \":Data:150\",", "\":Data:150\", _(\"Add Check1 Status\") + \":Data:150\", _(\"Add Check2 Status\") +", "= frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "from __future__ import unicode_literals import frappe from frappe import _,", "vemp = frappe.get_doc(\"Verify Employment Check3\", { \"applicant_id\": app.ref_id}) row +=", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check4\", {", "cg.id_check4 == 1: emp = frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id})", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Civil Check\",", "Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "Check2 Status\") + \":Data:150\", _(\"Add Check3 Status\") + \":Data:150\", _(\"Add", "if cg.employment_check3 == 1: emp = frappe.get_doc(\"Employment Check3\", { \"applicant_id\":", "= frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "[vemp.status] else: row += [\"-\"] if cg.address_check4 == 1: emp", "else: row += [\"-\"] if cg.reference_check1 == 1: emp =", "from calendar import monthrange def execute(filters=None): columns = get_columns() data", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\": app.ref_id})", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Family Check1\", {", "Check4 Status\") + \":Data:150\", _(\"Add Check1 
Status\") + \":Data:150\", _(\"Add", "Status\") + \":Data:150\", _(\"Criminal Check2 Status\") + \":Data:150\", _(\"ID Check1", "+= [\"-\"] if cg.id_check4 == 1: emp = frappe.get_doc(\"ID Check4\",", "Status\") + \":Data:150\", _(\"Add Check4 Status\") + \":Data:150\", _(\"Family Check", "= frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "Address Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "row += [\"-\"] if cg.family_check1 == 1: emp = frappe.get_doc(\"Family", "+ \":Data:150\", _(\"Edu Check2 Status\") + \":Data:150\", _(\"Edu Check3 Status\")", "_(\"Civil Check1 Status\") + \":Data:150\", _(\"Criminal Check2 Status\") + \":Data:150\",", "datetime from datetime import date import datetime from calendar import", "\":Data:150\", _(\"Edu Check2 Status\") + \":Data:150\", _(\"Edu Check3 Status\") +", "= [] filters applicant = applicants(filters) for app in applicant:", "app.ref_id}) if emp.status != \"Allocation Completed\": row += [emp.status] else:", "Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "emp = frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id}) if emp.status !=", "Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "row += [\"-\"] if cg.education_check3 == 1: emp = frappe.get_doc(\"Education", "row += [\"-\"] if cg.id_check1 == 1: emp = frappe.get_doc(\"ID", "+= [\"-\"] if cg.id_check6 == 1: emp = frappe.get_doc(\"ID Check6\",", "= frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where app.in_date between", "vemp = frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\": app.ref_id}) row +=", "== 1: emp = frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id}) if", "\":Data:150\", _(\"Add Check2 Status\") + \":Data:150\", _(\"Add Check3 Status\") +", "Employment Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "= frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "if cg.family_check1 == 1: emp = frappe.get_doc(\"Family Check1\", { \"applicant_id\":", "_(\"Status\") + \":Data:150\", _(\"Checks Group Name\") + \":Data:150\", _(\"Emp Check1", "Date\") + \":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks Group Name\") +", "_(\"Emp Check1 Status\") + \":Data:150\", _(\"Emp Check2 Status\") + \":Data:150\",", "+= [vemp.status] else: row += [\"-\"] if cg.civil_check == 1:", "nowdate, rounded, today) from datetime import datetime from datetime import", "vemp = frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\": app.ref_id}) row +=", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.education_check1", "for app in applicant: row = [app.customer, app.ref_id, app.candidate_name, app.in_date,", "row += [vemp.status] else: row += [\"-\"] if cg.education_check2 ==", "import frappe from frappe import _, msgprint from frappe.utils import", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check4", "get_columns() data = [] row = [] filters applicant =", "frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "vemp = frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\": app.ref_id}) row +=", "Check1 Status\") + \":Data:150\", _(\"Edu Check2 Status\") + \":Data:150\", _(\"Edu", "_(\"Ref Check4 Status\") + \":Data:150\", _(\"Civil Check1 Status\") + 
\":Data:150\",", "ID Check5\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "emp = frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id}) if emp.status !=", "cg = frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1 == 1: emp", "cg.employment_check3 == 1: emp = frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id})", "[vemp.status] else: row += [\"-\"] if cg.address_check3 == 1: emp", "+= [\"-\"] if cg.address_check4 == 1: emp = frappe.get_doc(\"Address Check4\",", "else: row += [\"-\"] if cg.reference_check2 == 1: emp =", "= frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "return columns, data def get_columns(): columns = [ _(\"Project Name\")", "vemp = frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\": app.ref_id}) row +=", "frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "vemp = frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\": app.ref_id}) row +=", "_(\"Add Check4 Status\") + \":Data:150\", _(\"Family Check Status\") + \":Data:150\",", "frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "def applicants(filters): applicant = frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app", "+ \":Data:150\", _(\"Ref Check2 Status\") + \":Data:150\", _(\"Ref Check3 Status\")", "emp = frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id}) if emp.status !=", "app.ref_id}): emp = frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "Check4 Status\") + \":Data:150\", _(\"Family Check Status\") + \":Data:150\", _(\"Ref", "= frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "[\"-\"] if cg.employment_check4 == 1: emp = frappe.get_doc(\"Employment Check4\", {", "= frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "emp = frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id}) if emp.status !=", "row += [\"-\"] if cg.id_check6 == 1: emp = frappe.get_doc(\"ID", "+ \":Data:150\", ] return columns def applicants(filters): applicant = frappe.db.sql(", "1: emp = frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id}) if emp.status", "app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where app.in_date between %(start_date)s and %(end_date)s", "[emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\": app.ref_id})", "[\"-\"] if cg.criminal_check == 1: emp = frappe.get_doc(\"Criminal Check\", {", "[\"-\"] if cg.id_check6 == 1: emp = frappe.get_doc(\"ID Check6\", {", "row += [\"-\"] if cg.address_check4 == 1: emp = frappe.get_doc(\"Address", "+ \":Data:150\", _(\"Add Check2 Status\") + \":Data:150\", _(\"Add Check3 Status\")", "= frappe.get_doc(\"Verify Education Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "= frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check1\", {", "frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "Status\") + \":Data:150\", 
_(\"Emp Check3 Status\") + \":Data:150\", _(\"Emp Check4", "= frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "= frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check1\",", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\":", "if emp.status != \"Allocation Completed\": row += [emp.status] else: vemp", "row += [vemp.status] else: row += [\"-\"] if cg.employment_check2 ==", "row += [\"-\"] if cg.civil_check == 1: emp = frappe.get_doc(\"Civil", "+ \":Data:150\", _(\"Edu Check1 Status\") + \":Data:150\", _(\"Edu Check2 Status\")", "row = [] filters applicant = applicants(filters) for app in", "\":Data:150\", _(\"Add Check4 Status\") + \":Data:150\", _(\"Family Check Status\") +", "if cg.criminal_check == 1: emp = frappe.get_doc(\"Criminal Check\", { \"applicant_id\":", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Criminal Check\", {", "1: emp = frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id}) if emp.status", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check6\", {", "+= [vemp.status] else: row += [\"-\"] if cg.address_check2 == 1:", "_(\"ID Check2 Status\") + \":Data:150\", _(\"ID Check3 Status\") + \":Data:150\",", "+= [vemp.status] else: row += [\"-\"] if cg.criminal_check == 1:", "= frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "get_columns(): columns = [ _(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS Ref.", "_(\"Add Check1 Status\") + \":Data:150\", _(\"Add Check2 Status\") + \":Data:150\",", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check1\",", "[vemp.status] else: row += [\"-\"] if cg.education_check3 == 1: emp", "Check1 Status\") + \":Data:150\", _(\"ID Check2 Status\") + \":Data:150\", _(\"ID", "Status\") + \":Data:150\", _(\"Add Check3 Status\") + \":Data:150\", _(\"Add Check4", "if cg.id_check5 == 1: emp = frappe.get_doc(\"ID Check5\", { \"applicant_id\":", "else: vemp = frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\": app.ref_id}) row", "by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"), \"end_date\": filters.get(\"to_date\") }, as_dict=1) return", "[emp.status] else: vemp = frappe.get_doc(\"Verify Address Check4\", { \"applicant_id\": app.ref_id})", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\":", "+= [vemp.status] else: row += [\"-\"] if cg.reference_check3 == 1:", "[\"-\"] if cg.reference_check3 == 1: emp = frappe.get_doc(\"Reference Check3\", {", "Employment Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check4\",", "== 1: emp = frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id}) if", "frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "row += [\"-\"] if cg.reference_check4 == 1: emp = frappe.get_doc(\"Reference", "Employment Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "+= [vemp.status] else: row += [\"-\"] if cg.address_check4 == 1:", "= frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "row += [\"-\"] if cg.education_check4 == 1: emp = frappe.get_doc(\"Education", "== 1: emp = 
frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id}) if", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\": app.ref_id})", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check1\", {", "_(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS Ref. No\") + \":Data:150\", _(\"Candidate", "cg.reference_check3 == 1: emp = frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id})", "== 1: emp = frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id}) if", "VHRS and contributors # For license information, please see license.txt", "+= [\"-\"] if cg.id_check1 == 1: emp = frappe.get_doc(\"ID Check1\",", "_(\"Emp Check4 Status\") + \":Data:150\", _(\"Edu Check1 Status\") + \":Data:150\",", "Check4 Status\") + \":Data:150\", _(\"ID Check5 Status\") + \":Data:150\", _(\"ID", "row += [\"-\"] if cg.employment_check4 == 1: emp = frappe.get_doc(\"Employment", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\":", "== 1: emp = frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id}) if", "Status\") + \":Data:150\", _(\"Edu Check4 Status\") + \":Data:150\", _(\"Add Check1", "1: emp = frappe.get_doc(\"Address Check3\", { \"applicant_id\": app.ref_id}) if emp.status", "row += [vemp.status] else: row += [\"-\"] if cg.id_check5 ==", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check1\",", "+= [\"-\"] if cg.reference_check3 == 1: emp = frappe.get_doc(\"Reference Check3\",", "Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check4\",", "== 1: emp = frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id}) if", "frappe.get_doc(\"Education Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "== 1: emp = frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id}) if", "row += [vemp.status] else: row += [\"-\"] if cg.reference_check4 ==", "Reference Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\":", "+= [\"-\"] if cg.id_check2 == 1: emp = frappe.get_doc(\"ID Check2\",", "Group Name\") + \":Data:150\", _(\"Emp Check1 Status\") + \":Data:150\", _(\"Emp", "Check\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "columns = [ _(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS Ref. 
No\")", "1: emp = frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id}) if emp.status", "frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "_(\"Candidate Name\") + \":Data:180\", _(\"Start Date\") + \":Date:150\", _(\"Status\") +", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\": app.ref_id})", "[emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\": app.ref_id})", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.employment_check4", "+= [\"-\"] if cg.employment_check3 == 1: emp = frappe.get_doc(\"Employment Check3\",", "[emp.status] else: vemp = frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\": app.ref_id})", "emp = frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "== 1: emp = frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id}) if", "Check1 Status\") + \":Data:150\", _(\"Add Check2 Status\") + \":Data:150\", _(\"Add", "Check4 Status\") + \":Data:150\", _(\"Civil Check1 Status\") + \":Data:150\", _(\"Criminal", "frappe.utils import (cint, cstr, date_diff, flt, getdate, money_in_words, nowdate, rounded,", "== 1: emp = frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id}) if", "Check3 Status\") + \":Data:150\", _(\"ID Check4 Status\") + \":Data:150\", _(\"ID", "emp = frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "Address Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "else: row += [\"-\"] if cg.civil_check == 1: emp =", "+= [vemp.status] else: row += [\"-\"] data.append(row) return columns, data", "where app.in_date between %(start_date)s and %(end_date)s order by app.in_date\"\"\", {", "Check1\", { \"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education Check1\", { \"applicant_id\":", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check1\",", "1: emp = frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id}) if emp.status", "== 1: emp = frappe.get_doc(\"Employment Check3\", { \"applicant_id\": app.ref_id}) if", "\":Data:150\", _(\"ID Check3 Status\") + \":Data:150\", _(\"ID Check4 Status\") +", "_(\"ID Check1 Status\") + \":Data:150\", _(\"ID Check2 Status\") + \":Data:150\",", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\":", "1: emp = frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "+ \":Data:150\", _(\"Checks Group Name\") + \":Data:150\", _(\"Emp Check1 Status\")", "Check3 Status\") + \":Data:150\", _(\"Ref Check4 Status\") + \":Data:150\", _(\"Civil", "+= [vemp.status] else: row += [\"-\"] if cg.employment_check3 == 1:", "frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "== 1: emp = frappe.get_doc(\"Civil Check\", { \"applicant_id\": app.ref_id}) if", "row += [vemp.status] else: row += [\"-\"] if cg.reference_check1 ==", "= frappe.get_doc(\"Verify ID Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+ \":Data:150\", _(\"ID Check3 Status\") + \":Data:150\", _(\"ID Check4 Status\")", "filters applicant = applicants(filters) for app in applicant: row =", "[\"-\"] if cg.reference_check2 == 1: emp = frappe.get_doc(\"Reference Check2\", {", "frappe.get_doc(\"Civil Check\", 
{ \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "Status\") + \":Data:150\", _(\"Civil Check1 Status\") + \":Data:150\", _(\"Criminal Check2", "[emp.status] else: vemp = frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\": app.ref_id})", "[emp.status] else: vemp = frappe.get_doc(\"Verify Address Check1\", { \"applicant_id\": app.ref_id})", "else: row += [\"-\"] if cg.reference_check3 == 1: emp =", "else: row += [\"-\"] if cg.education_check3 == 1: emp =", "frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "+= [\"-\"] if cg.address_check2 == 1: emp = frappe.get_doc(\"Address Check2\",", "vemp = frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\": app.ref_id}) row +=", "row += [\"-\"] if cg.address_check1 == 1: emp = frappe.get_doc(\"Address", "== 1: emp = frappe.get_doc(\"Family Check1\", { \"applicant_id\": app.ref_id}) if", "[vemp.status] else: row += [\"-\"] if cg.id_check4 == 1: emp", "(cint, cstr, date_diff, flt, getdate, money_in_words, nowdate, rounded, today) from", "_(\"Edu Check2 Status\") + \":Data:150\", _(\"Edu Check3 Status\") + \":Data:150\",", "Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "+= [\"-\"] if cg.address_check3 == 1: emp = frappe.get_doc(\"Address Check3\",", "+= [\"-\"] if cg.reference_check4 == 1: emp = frappe.get_doc(\"Reference Check4\",", "Reference Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "== 1: emp = frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id}) if", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\": app.ref_id})", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check4\",", "Family Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "= [app.customer, app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if app.status !=", "Civil Check\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "if cg.id_check1 == 1: emp = frappe.get_doc(\"ID Check1\", { \"applicant_id\":", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.reference_check2", "+ \":Data:150\", _(\"Criminal Check2 Status\") + \":Data:150\", _(\"ID Check1 Status\")", "row += [vemp.status] else: row += [\"-\"] if cg.id_check4 ==", "[emp.status] else: vemp = frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\": app.ref_id})", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference", "= frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "2013, VHRS and contributors # For license information, please see", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check4\", {", "= frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check2\",", "[vemp.status] else: row += [\"-\"] if cg.reference_check4 == 1: emp", "+= [vemp.status] else: row += [\"-\"] if cg.id_check1 == 1:", "= frappe.get_doc(\"Verify Education Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\":", "[vemp.status] else: row += [\"-\"] if cg.criminal_check == 1: emp", "_(\"Ref Check1 Status\") + \":Data:150\", _(\"Ref Check2 Status\") + \":Data:150\",", "= 
frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "[vemp.status] else: row += [\"-\"] if cg.education_check2 == 1: emp", "else: vemp = frappe.get_doc(\"Verify Family Check1\", { \"applicant_id\": app.ref_id}) row", "Check2 Status\") + \":Data:150\", _(\"Edu Check3 Status\") + \":Data:150\", _(\"Edu", "cg.address_check4 == 1: emp = frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id})", "Education Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "else: vemp = frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\": app.ref_id}) row", "money_in_words, nowdate, rounded, today) from datetime import datetime from datetime", "+= [vemp.status] else: row += [\"-\"] if cg.reference_check1 == 1:", "[vemp.status] else: row += [\"-\"] if cg.education_check1 == 1: if", "row += [\"-\"] if cg.id_check2 == 1: emp = frappe.get_doc(\"ID", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check6", "Education Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "row += [\"-\"] if cg.id_check5 == 1: emp = frappe.get_doc(\"ID", "Check3 Status\") + \":Data:150\", _(\"Add Check4 Status\") + \":Data:150\", _(\"Family", "+= [vemp.status] else: row += [\"-\"] if cg.education_check3 == 1:", "Check2 Status\") + \":Data:150\", _(\"ID Check1 Status\") + \":Data:150\", _(\"ID", "= frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "from `tabApplicant` app where app.in_date between %(start_date)s and %(end_date)s order", "vemp = frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\": app.ref_id}) row +=", "else: vemp = frappe.get_doc(\"Verify Reference Check2\", { \"applicant_id\": app.ref_id}) row", "= frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "+ \":Data:150\", _(\"Ref Check4 Status\") + \":Data:150\", _(\"Civil Check1 Status\")", "__future__ import unicode_literals import frappe from frappe import _, msgprint", "1: emp = frappe.get_doc(\"Criminal Check\", { \"applicant_id\": app.ref_id}) if emp.status", "= frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "please see license.txt from __future__ import unicode_literals import frappe from", "cg.education_check1 == 1: if frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}): emp", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.education_check2", "[\"-\"] if cg.address_check2 == 1: emp = frappe.get_doc(\"Address Check2\", {", "row += [vemp.status] else: row += [\"-\"] if cg.civil_check ==", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check3\", { \"applicant_id\":", "if cg.id_check4 == 1: emp = frappe.get_doc(\"ID Check4\", { \"applicant_id\":", "= frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "cg.id_check6 == 1: emp = frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id})", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\":", "vemp = frappe.get_doc(\"Verify Civil Check\", { \"applicant_id\": app.ref_id}) row +=", "= frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "else: row += [\"-\"] data.append(row) return columns, data def get_columns():", "def get_columns(): columns = [ _(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS", "[emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check4\", { 
\"applicant_id\": app.ref_id})", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check5\", { \"applicant_id\":", "row += [vemp.status] else: row += [\"-\"] if cg.employment_check4 ==", "[vemp.status] else: row += [\"-\"] if cg.address_check1 == 1: emp", "+ \":Data:150\", _(\"Add Check4 Status\") + \":Data:150\", _(\"Family Check Status\")", "if app.status != \"Entry Pending\": cg = frappe.get_doc(\"Checks Group\", app.checks_group)", "datetime from calendar import monthrange def execute(filters=None): columns = get_columns()", "\":Data:150\", _(\"Emp Check1 Status\") + \":Data:150\", _(\"Emp Check2 Status\") +", "Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "else: row += [\"-\"] if cg.employment_check4 == 1: emp =", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check2\", {", "emp.status != \"Allocation Completed\": row += [emp.status] else: vemp =", "emp = frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "= frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "emp = frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id}) if emp.status !=", "else: vemp = frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\": app.ref_id}) row", "Pending\": cg = frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1 == 1:", "emp = frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id}) if emp.status !=", "else: row += [\"-\"] if cg.id_check2 == 1: emp =", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.id_check1", "[] row = [] filters applicant = applicants(filters) for app", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check4\", {", "flt, getdate, money_in_words, nowdate, rounded, today) from datetime import datetime", "= frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "row += [vemp.status] else: row += [\"-\"] if cg.reference_check3 ==", "emp = frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id}) if emp.status !=", "[\"-\"] if cg.education_check4 == 1: emp = frappe.get_doc(\"Education Check4\", {", "vemp = frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\": app.ref_id}) row +=", "[vemp.status] else: row += [\"-\"] if cg.family_check1 == 1: emp", "[emp.status] else: vemp = frappe.get_doc(\"Verify Criminal Check\", { \"applicant_id\": app.ref_id})", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify ID Check3\", {", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\":", "row += [vemp.status] else: row += [\"-\"] if cg.address_check1 ==", "ID Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "[] filters applicant = applicants(filters) for app in applicant: row", "emp = frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id}) if emp.status !=", "cg.civil_check == 1: emp = frappe.get_doc(\"Civil Check\", { \"applicant_id\": app.ref_id})", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check4\", {", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address", "Status\") + \":Data:150\", _(\"Edu Check1 Status\") + \":Data:150\", _(\"Edu Check2", "applicant = frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status 
from `tabApplicant` app where app.in_date", "[vemp.status] else: row += [\"-\"] if cg.id_check1 == 1: emp", "frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.reference_check4", "Status\") + \":Data:150\", _(\"ID Check2 Status\") + \":Data:150\", _(\"ID Check3", "app in applicant: row = [app.customer, app.ref_id, app.candidate_name, app.in_date, app.status,", "frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "Status\") + \":Data:150\", _(\"ID Check4 Status\") + \":Data:150\", _(\"ID Check5", "\":Data:150\", _(\"Family Check Status\") + \":Data:150\", _(\"Ref Check1 Status\") +", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check3\",", "[\"-\"] if cg.civil_check == 1: emp = frappe.get_doc(\"Civil Check\", {", "= frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "+= [\"-\"] if cg.civil_check == 1: emp = frappe.get_doc(\"Civil Check\",", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check3\", {", "vemp = frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\": app.ref_id}) row +=", "[emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\": app.ref_id})", "[emp.status] else: vemp = frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\": app.ref_id})", "Check1 Status\") + \":Data:150\", _(\"Emp Check2 Status\") + \":Data:150\", _(\"Emp", "+= [\"-\"] if cg.education_check4 == 1: emp = frappe.get_doc(\"Education Check4\",", "frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "[vemp.status] else: row += [\"-\"] if cg.reference_check3 == 1: emp", "frappe.get_doc(\"ID Check6\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "+ \":Data:150\", _(\"Ref Check1 Status\") + \":Data:150\", _(\"Ref Check2 Status\")", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check3\", {", "if cg.education_check1 == 1: if frappe.db.exists(\"Education Check1\", { \"applicant_id\": app.ref_id}):", "!= \"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify", "1: emp = frappe.get_doc(\"Education Check4\", { \"applicant_id\": app.ref_id}) if emp.status", "frappe.get_doc(\"Verify Employment Check1\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "_(\"Emp Check3 Status\") + \":Data:150\", _(\"Emp Check4 Status\") + \":Data:150\",", "_(\"ID Check5 Status\") + \":Data:150\", _(\"ID Check6 Status\") + \":Data:150\",", "\"Allocation Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment", "else: vemp = frappe.get_doc(\"Verify Education Check2\", { \"applicant_id\": app.ref_id}) row", "else: vemp = frappe.get_doc(\"Verify Reference Check1\", { \"applicant_id\": app.ref_id}) row", "+= [vemp.status] else: row += [\"-\"] if cg.id_check3 == 1:", "today) from datetime import datetime from datetime import date import", "+= [\"-\"] if cg.education_check1 == 1: if frappe.db.exists(\"Education Check1\", {", "else: vemp = frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\": app.ref_id}) row", "= frappe.get_doc(\"Employment Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "from frappe import _, msgprint from frappe.utils import (cint, cstr,", "in applicant: row = [app.customer, app.ref_id, 
app.candidate_name, app.in_date, app.status, app.checks_group]", "Status\") + \":Data:150\", _(\"Edu Check2 Status\") + \":Data:150\", _(\"Edu Check3", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.criminal_check", "[app.customer, app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if app.status != \"Entry", "_(\"Ref Check3 Status\") + \":Data:150\", _(\"Ref Check4 Status\") + \":Data:150\",", "row += [\"-\"] if cg.employment_check2 == 1: emp = frappe.get_doc(\"Employment", "else: row += [\"-\"] if cg.criminal_check == 1: emp =", "ID Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "row += [vemp.status] else: row += [\"-\"] if cg.education_check3 ==", "1: emp = frappe.get_doc(\"Address Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "[\"-\"] if cg.employment_check3 == 1: emp = frappe.get_doc(\"Employment Check3\", {", "app.status, app.checks_group] if app.status != \"Entry Pending\": cg = frappe.get_doc(\"Checks", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check3\", { \"applicant_id\":", "[\"-\"] if cg.education_check3 == 1: emp = frappe.get_doc(\"Education Check3\", {", "1: emp = frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id}) if emp.status", "row += [vemp.status] else: row += [\"-\"] if cg.reference_check2 ==", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Education Check3\",", "row += [vemp.status] else: row += [\"-\"] if cg.employment_check3 ==", "+= [vemp.status] else: row += [\"-\"] if cg.employment_check4 == 1:", "= frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "data def get_columns(): columns = [ _(\"Project Name\") + \":Link/Customer:200\",", "applicant: row = [app.customer, app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if", "[emp.status] else: vemp = frappe.get_doc(\"Verify Education Check4\", { \"applicant_id\": app.ref_id})", "+= [\"-\"] if cg.reference_check1 == 1: emp = frappe.get_doc(\"Reference Check1\",", "1: emp = frappe.get_doc(\"Address Check2\", { \"applicant_id\": app.ref_id}) if emp.status", "+ \":Data:150\", _(\"ID Check2 Status\") + \":Data:150\", _(\"ID Check3 Status\")", "app.ref_id, app.candidate_name, app.in_date, app.status, app.checks_group] if app.status != \"Entry Pending\":", "+= [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check3\", { \"applicant_id\":", "if cg.address_check4 == 1: emp = frappe.get_doc(\"Address Check4\", { \"applicant_id\":", "\":Data:150\", _(\"Edu Check1 Status\") + \":Data:150\", _(\"Edu Check2 Status\") +", "import datetime from calendar import monthrange def execute(filters=None): columns =", "1: emp = frappe.get_doc(\"ID Check5\", { \"applicant_id\": app.ref_id}) if emp.status", "\"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id}) if", "1: emp = frappe.get_doc(\"Reference Check1\", { \"applicant_id\": app.ref_id}) if emp.status", "1: emp = frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id}) if emp.status", "frappe.get_doc(\"ID Check1\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "\":Date:150\", _(\"Status\") + \":Data:150\", _(\"Checks Group Name\") + \":Data:150\", _(\"Emp", "+= [vemp.status] else: row += [\"-\"] if cg.education_check1 == 1:", "{ \"applicant_id\": app.ref_id}): emp = frappe.get_doc(\"Education Check1\", { \"applicant_id\": app.ref_id})", "app.in_date\"\"\", { \"start_date\": 
filters.get(\"from_date\"), \"end_date\": filters.get(\"to_date\") }, as_dict=1) return applicant", "\"Entry Pending\": cg = frappe.get_doc(\"Checks Group\", app.checks_group) if cg.employment_check1 ==", "if cg.civil_check == 1: emp = frappe.get_doc(\"Civil Check\", { \"applicant_id\":", "cg.reference_check2 == 1: emp = frappe.get_doc(\"Reference Check2\", { \"applicant_id\": app.ref_id})", "else: row += [\"-\"] if cg.address_check1 == 1: emp =", "= frappe.get_doc(\"Verify Employment Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status]", "else: vemp = frappe.get_doc(\"Verify ID Check1\", { \"applicant_id\": app.ref_id}) row", "if cg.reference_check1 == 1: emp = frappe.get_doc(\"Reference Check1\", { \"applicant_id\":", "= frappe.get_doc(\"ID Check3\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "_(\"ID Check4 Status\") + \":Data:150\", _(\"ID Check5 Status\") + \":Data:150\",", "\":Data:150\", _(\"Emp Check2 Status\") + \":Data:150\", _(\"Emp Check3 Status\") +", "frappe.get_doc(\"Employment Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\":", "+= [\"-\"] if cg.reference_check2 == 1: emp = frappe.get_doc(\"Reference Check2\",", "1: emp = frappe.get_doc(\"Civil Check\", { \"applicant_id\": app.ref_id}) if emp.status", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.address_check3", "frappe import _, msgprint from frappe.utils import (cint, cstr, date_diff,", "[vemp.status] else: row += [\"-\"] data.append(row) return columns, data def", "+= [vemp.status] else: row += [\"-\"] if cg.education_check4 == 1:", "[\"-\"] if cg.address_check3 == 1: emp = frappe.get_doc(\"Address Check3\", {", "Check6\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row", "= frappe.get_doc(\"Education Check2\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "[vemp.status] else: row += [\"-\"] if cg.id_check2 == 1: emp", "cg.employment_check2 == 1: emp = frappe.get_doc(\"Employment Check2\", { \"applicant_id\": app.ref_id})", "{ \"applicant_id\": app.ref_id}) if emp.status != \"Allocation Completed\": row +=", "frappe.db.sql( \"\"\"select app.checks_group,app.customer,app.ref_id,app.candidate_name,app.in_date,app.status from `tabApplicant` app where app.in_date between %(start_date)s", "%(end_date)s order by app.in_date\"\"\", { \"start_date\": filters.get(\"from_date\"), \"end_date\": filters.get(\"to_date\") },", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check2\", {", "emp = frappe.get_doc(\"Reference Check4\", { \"applicant_id\": app.ref_id}) if emp.status !=", "[vemp.status] else: row += [\"-\"] if cg.employment_check4 == 1: emp", "[ _(\"Project Name\") + \":Link/Customer:200\", _(\"VHRS Ref. 
No\") + \":Data:150\",", "\":Data:150\", _(\"Civil Check1 Status\") + \":Data:150\", _(\"Criminal Check2 Status\") +", "else: row += [\"-\"] if cg.id_check5 == 1: emp =", "1: emp = frappe.get_doc(\"ID Check4\", { \"applicant_id\": app.ref_id}) if emp.status", "Name\") + \":Data:180\", _(\"Start Date\") + \":Date:150\", _(\"Status\") + \":Data:150\",", "[\"-\"] if cg.id_check5 == 1: emp = frappe.get_doc(\"ID Check5\", {", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Address Check2\",", "emp = frappe.get_doc(\"ID Check2\", { \"applicant_id\": app.ref_id}) if emp.status !=", "Check4\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row +=", "import _, msgprint from frappe.utils import (cint, cstr, date_diff, flt,", "app.ref_id}) row += [vemp.status] else: row += [\"-\"] if cg.reference_check3", "Check2 Status\") + \":Data:150\", _(\"Ref Check3 Status\") + \":Data:150\", _(\"Ref", "Group\", app.checks_group) if cg.employment_check1 == 1: emp = frappe.get_doc(\"Employment Check1\",", "= frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id}) if emp.status != \"Allocation", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Employment Check2\",", "Status\") + \":Data:150\", _(\"Ref Check2 Status\") + \":Data:150\", _(\"Ref Check3", "frappe.get_doc(\"Verify ID Check6\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else:", "Status\") + \":Data:150\", _(\"Emp Check4 Status\") + \":Data:150\", _(\"Edu Check1", "from datetime import datetime from datetime import date import datetime", "[vemp.status] else: row += [\"-\"] if cg.id_check5 == 1: emp", "1: emp = frappe.get_doc(\"Reference Check3\", { \"applicant_id\": app.ref_id}) if emp.status", "vemp = frappe.get_doc(\"Verify ID Check2\", { \"applicant_id\": app.ref_id}) row +=", "app.status != \"Entry Pending\": cg = frappe.get_doc(\"Checks Group\", app.checks_group) if", "Completed\": row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check2\",", "_(\"ID Check6 Status\") + \":Data:150\", ] return columns def applicants(filters):", "else: vemp = frappe.get_doc(\"Verify Address Check2\", { \"applicant_id\": app.ref_id}) row", "emp = frappe.get_doc(\"Address Check4\", { \"applicant_id\": app.ref_id}) if emp.status !=", "ID Check6\", { \"applicant_id\": app.ref_id}) row += [vemp.status] else: row", "Check2 Status\") + \":Data:150\", _(\"ID Check3 Status\") + \":Data:150\", _(\"ID", "row += [emp.status] else: vemp = frappe.get_doc(\"Verify Reference Check1\", {" ]
[ "if kwargs.get('root_path') is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name) dataset =", "**kwargs): if kwargs.get('root_path') is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name) dataset", "'./materials' datasets_dt = {} def register(name): def decorator(cls): datasets_dt[name] =", "import os DEFAULT_ROOT = './materials' datasets_dt = {} def register(name):", "os DEFAULT_ROOT = './materials' datasets_dt = {} def register(name): def", "cls return cls return decorator def make(name, **kwargs): if kwargs.get('root_path')", "None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name) dataset = datasets_dt[name](**kwargs) return dataset", "{} def register(name): def decorator(cls): datasets_dt[name] = cls return cls", "return decorator def make(name, **kwargs): if kwargs.get('root_path') is None: kwargs['root_path']", "make(name, **kwargs): if kwargs.get('root_path') is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name)", "def make(name, **kwargs): if kwargs.get('root_path') is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT,", "register(name): def decorator(cls): datasets_dt[name] = cls return cls return decorator", "datasets_dt = {} def register(name): def decorator(cls): datasets_dt[name] = cls", "= {} def register(name): def decorator(cls): datasets_dt[name] = cls return", "def register(name): def decorator(cls): datasets_dt[name] = cls return cls return", "datasets_dt[name] = cls return cls return decorator def make(name, **kwargs):", "<filename>dataset/dataset.py import os DEFAULT_ROOT = './materials' datasets_dt = {} def", "= cls return cls return decorator def make(name, **kwargs): if", "return cls return decorator def make(name, **kwargs): if kwargs.get('root_path') is", "cls return decorator def make(name, **kwargs): if kwargs.get('root_path') is None:", "def decorator(cls): datasets_dt[name] = cls return cls return decorator def", "decorator def make(name, **kwargs): if kwargs.get('root_path') is None: kwargs['root_path'] =", "kwargs.get('root_path') is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name) dataset = datasets_dt[name](**kwargs)", "DEFAULT_ROOT = './materials' datasets_dt = {} def register(name): def decorator(cls):", "is None: kwargs['root_path'] = os.path.join(DEFAULT_ROOT, name) dataset = datasets_dt[name](**kwargs) return", "= './materials' datasets_dt = {} def register(name): def decorator(cls): datasets_dt[name]", "decorator(cls): datasets_dt[name] = cls return cls return decorator def make(name," ]
[ ".comment import CommentParser from .protobuf import Protobuf from .proto_structures import", "line): line = line.strip().replace(' ', '') lindex = len('syntax=') rindex", "= len('syntax=') rindex = line.index(';') value = line[lindex:rindex].strip().replace('\"', \"\").replace(\"'\", \"\")", "class SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list): if", "Protobuf from .proto_structures import Syntax class SyntaxParser(): @classmethod def parse_and_add(cls,", "rindex = line.index(';') value = line[lindex:rindex].strip().replace('\"', \"\").replace(\"'\", \"\") return value", "None: raise 'multiple syntax detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod", "line = line.strip().replace(' ', '') lindex = len('syntax=') rindex =", "from .protobuf import Protobuf from .proto_structures import Syntax class SyntaxParser():", "lindex = len('syntax=') rindex = line.index(';') value = line[lindex:rindex].strip().replace('\"', \"\").replace(\"'\",", "parse_syntax(cls, line, top_comment_list): value = cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list)", "', '') lindex = len('syntax=') rindex = line.index(';') value =", "SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list): if proto_obj.syntax", "Protobuf, line, top_comment_list): if proto_obj.syntax is not None: raise 'multiple", "import Protobuf from .proto_structures import Syntax class SyntaxParser(): @classmethod def", "proto_obj: Protobuf, line, top_comment_list): if proto_obj.syntax is not None: raise", "line, top_comment_list): if proto_obj.syntax is not None: raise 'multiple syntax", "def parse_syntax(cls, line, top_comment_list): value = cls._get_syntax_value(line) comments = CommentParser.create_comment(line,", "proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls, line, top_comment_list): value", "@classmethod def parse_syntax(cls, line, top_comment_list): value = cls._get_syntax_value(line) comments =", "= cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value, comments)", "= CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value, comments) return syntax @classmethod", "len('syntax=') rindex = line.index(';') value = line[lindex:rindex].strip().replace('\"', \"\").replace(\"'\", \"\") return", "top_comment_list) syntax = Syntax(value, comments) return syntax @classmethod def _get_syntax_value(cls,", "parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list): if proto_obj.syntax is not None:", "import CommentParser from .protobuf import Protobuf from .proto_structures import Syntax", "raise 'multiple syntax detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod def", "= line.strip().replace(' ', '') lindex = len('syntax=') rindex = line.index(';')", "syntax @classmethod def _get_syntax_value(cls, line): line = line.strip().replace(' ', '')", "import Syntax class SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj: Protobuf, line,", "Syntax class SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list):", "cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value, comments) return", "CommentParser from .protobuf import Protobuf from .proto_structures import Syntax class", "is not None: raise 'multiple syntax detected!' 
proto_obj.syntax = cls.parse_syntax(line,", "from .comment import CommentParser from .protobuf import Protobuf from .proto_structures", "'multiple syntax detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls,", "syntax = Syntax(value, comments) return syntax @classmethod def _get_syntax_value(cls, line):", "top_comment_list): value = cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list) syntax =", "def _get_syntax_value(cls, line): line = line.strip().replace(' ', '') lindex =", "line.strip().replace(' ', '') lindex = len('syntax=') rindex = line.index(';') value", "comments) return syntax @classmethod def _get_syntax_value(cls, line): line = line.strip().replace('", "from .proto_structures import Syntax class SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj:", "def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list): if proto_obj.syntax is not", ".proto_structures import Syntax class SyntaxParser(): @classmethod def parse_and_add(cls, proto_obj: Protobuf,", "top_comment_list) @classmethod def parse_syntax(cls, line, top_comment_list): value = cls._get_syntax_value(line) comments", "proto_obj.syntax is not None: raise 'multiple syntax detected!' proto_obj.syntax =", "top_comment_list): if proto_obj.syntax is not None: raise 'multiple syntax detected!'", "syntax detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls, line,", "return syntax @classmethod def _get_syntax_value(cls, line): line = line.strip().replace(' ',", "'') lindex = len('syntax=') rindex = line.index(';') value = line[lindex:rindex].strip().replace('\"',", "= Syntax(value, comments) return syntax @classmethod def _get_syntax_value(cls, line): line", "line, top_comment_list): value = cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list) syntax", "comments = CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value, comments) return syntax", "CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value, comments) return syntax @classmethod def", "detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls, line, top_comment_list):", "= cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls, line, top_comment_list): value =", "cls.parse_syntax(line, top_comment_list) @classmethod def parse_syntax(cls, line, top_comment_list): value = cls._get_syntax_value(line)", "not None: raise 'multiple syntax detected!' proto_obj.syntax = cls.parse_syntax(line, top_comment_list)", "Syntax(value, comments) return syntax @classmethod def _get_syntax_value(cls, line): line =", "value = cls._get_syntax_value(line) comments = CommentParser.create_comment(line, top_comment_list) syntax = Syntax(value,", "@classmethod def _get_syntax_value(cls, line): line = line.strip().replace(' ', '') lindex", "_get_syntax_value(cls, line): line = line.strip().replace(' ', '') lindex = len('syntax=')", "if proto_obj.syntax is not None: raise 'multiple syntax detected!' proto_obj.syntax", "@classmethod def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list): if proto_obj.syntax is", ".protobuf import Protobuf from .proto_structures import Syntax class SyntaxParser(): @classmethod" ]
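The third list reassembles into a parser for a protobuf file's syntax statement. One caveat: the fragments raise a bare string ('multiple syntax detected!'), which Python 3 rejects at runtime, so the sketch below substitutes a ValueError; the relative imports assume the module sits alongside comment.py, protobuf.py, and proto_structures.py, as the fragments indicate.

from .comment import CommentParser
from .protobuf import Protobuf
from .proto_structures import Syntax


class SyntaxParser():
    @classmethod
    def parse_and_add(cls, proto_obj: Protobuf, line, top_comment_list):
        if proto_obj.syntax is not None:
            # The fragments raise the bare string 'multiple syntax detected!';
            # Python 3 requires an exception instance, hence ValueError here.
            raise ValueError('multiple syntax detected!')
        proto_obj.syntax = cls.parse_syntax(line, top_comment_list)

    @classmethod
    def parse_syntax(cls, line, top_comment_list):
        value = cls._get_syntax_value(line)
        comments = CommentParser.create_comment(line, top_comment_list)
        syntax = Syntax(value, comments)
        return syntax

    @classmethod
    def _get_syntax_value(cls, line):
        # 'syntax = "proto3";'  ->  'proto3'
        line = line.strip().replace(' ', '')
        lindex = len('syntax=')
        rindex = line.index(';')
        value = line[lindex:rindex].strip().replace('"', "").replace("'", "")
        return value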
[ "= Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST']) def home(): return render_template('home.html')", "Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST']) def home(): return render_template('home.html') @core.route(\"/about\")", "methods=['GET', 'POST']) def home(): return render_template('home.html') @core.route(\"/about\") def about(): return", "import render_template, request, Blueprint core = Blueprint('core', __name__) @core.route(\"/\", methods=['GET',", "core = Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST']) def home(): return", "about(): return render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def search(): search_str =", "def home(): return render_template('home.html') @core.route(\"/about\") def about(): return render_template('about.html') @core.route('/search',", "render_template, request, Blueprint core = Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST'])", "Blueprint core = Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST']) def home():", "home(): return render_template('home.html') @core.route(\"/about\") def about(): return render_template('about.html') @core.route('/search', methods=['GET',", "request, Blueprint core = Blueprint('core', __name__) @core.route(\"/\", methods=['GET', 'POST']) def", "methods=['GET', 'POST']) def search(): search_str = request.args.get('globalsearch') return render_template('search.html', search_str=search_str)", "'POST']) def home(): return render_template('home.html') @core.route(\"/about\") def about(): return render_template('about.html')", "render_template('home.html') @core.route(\"/about\") def about(): return render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def", "@core.route(\"/\", methods=['GET', 'POST']) def home(): return render_template('home.html') @core.route(\"/about\") def about():", "def about(): return render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def search(): search_str", "@core.route('/search', methods=['GET', 'POST']) def search(): search_str = request.args.get('globalsearch') return render_template('search.html',", "@core.route(\"/about\") def about(): return render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def search():", "<reponame>mgp-git/Flask<filename>IPL/app/core/views.py from flask import render_template, request, Blueprint core = Blueprint('core',", "from flask import render_template, request, Blueprint core = Blueprint('core', __name__)", "__name__) @core.route(\"/\", methods=['GET', 'POST']) def home(): return render_template('home.html') @core.route(\"/about\") def", "render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def search(): search_str = request.args.get('globalsearch') return", "return render_template('home.html') @core.route(\"/about\") def about(): return render_template('about.html') @core.route('/search', methods=['GET', 'POST'])", "return render_template('about.html') @core.route('/search', methods=['GET', 'POST']) def search(): search_str = request.args.get('globalsearch')", "flask import render_template, request, Blueprint core = Blueprint('core', __name__) @core.route(\"/\"," ]
[ ") cls.s3_client_class = functools.partial( boto3.client, endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, ) def teardown_method(self): \"\"\"Deinitializes", "create( self._db, Representation, url=\"http://books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, ) s3 = self._create_s3_uploader(MockS3Client) #", "It # wasn't informed of the irrelevant 'not-a-bucket-at-all' # setting.", "\"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename with spaces!.jpg\"), ), ( \"http_url_with_escaped_symbols_but_unquote_set_to_false\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\",", "( config.s3[\"addressing_style\"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name = client_class.call_args_list[1].args[0] region_name =", "def test_mirror_one(self): edition, pool = self._edition(with_license_pool=True) original_cover_location = \"http://example.com/a-cover.png\" content", "= client_class.call_args_list[0].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[0].kwargs[ \"aws_secret_access_key\" ] config = client_class.call_args_list[0].kwargs[\"config\"]", "if title: parameters[\"title\"] = title if data_source_name: data_source = DataSource.lookup(self._db,", "assert result == expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with( \"get_object\", ExpiresIn=expected_expiration, Params={\"Bucket\": bucket,", "the buckets from MinIO\"\"\" super(S3UploaderTest, self).teardown_method() response = self.minio_s3_client.list_buckets() for", "functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\", False, ), ( \"using_minio_uploader_and_open_access_bucket\",", "{S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", None, \"us-east-3\", ), (", "False, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ] ) def test_mirror(", "\"with_scaled_size_and_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, \"us-east-3\", ),", "\"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ], }, }", "scaled_size=601)) # Arrange data_source = DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier)", "import parameterized from ..mirror import MirrorUploader from ..model import (", "was set when the representation was 'mirrored' for rep in", "( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0,", "assert \"Error!\" == rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): raise Exception(\"Error!\")", "Failed during completion with s3.multipart_upload( rep, rep.url, upload_class=AnotherFailingMultipartS3Upload ) as", "\"bucket\", \"Key\": \"books.mrc\", }, { \"Body\": \"Part 2\", \"UploadId\": 1,", "= self._identifier(foreign_id=\"ABOOK\") # 
eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit, identifier, \"filename\", scaled_size=601)) #", "epub.resource.representation uploader = self._create_s3_uploader(MockS3Client) # A network failure is treated", "= os.environ.get( \"SIMPLIFIED_TEST_MINIO_PASSWORD\", \"<PASSWORD>\" ) _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _", "== upload.completed assert False == upload.aborted upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\")", "import botocore import pytest from botocore.exceptions import BotoCoreError, ClientError from", "dictionary :rtype: Dict \"\"\" if value: if settings: settings[key] =", "[ ( \"s3_path_style_request_without_region\", \"https://s3.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\",", "{S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} # uploader = self._uploader(**buckets) # m =", "boto3 client set up with MinIO credentials\"\"\" super(S3UploaderIntegrationTest, cls).setup_class() cls.minio_s3_client", "assert buckets == uploader.buckets # get_bucket just does a lookup", "assert True == MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload):", "a transient error. response = dict( Error=dict( Code=401, Message=\"Bad credentials\",", "suite by removing all the buckets from MinIO\"\"\" super(S3UploaderTest, self).teardown_method()", "( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dashed_region\", \"https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\",", "MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception @parameterized.expand( [ ( \"default_expiration_parameter\", None,", "final_mirror_url. 
assert ( \"final_mirror_url was called with bucket books-go, key", "= uploader.marc_file_url(library, lane, end_time, start_time) # Assert assert result ==", "Arrange uploader = self._create_s3_uploader(region=region) # Act result = uploader.content_root(bucket) #", "import datetime_utc, utc_now class S3UploaderTest(DatabaseTest): def _integration(self, **settings): \"\"\"Create and", "BotoCoreError, ClientError from mock import MagicMock from parameterized import parameterized", "): # Arrange uploader = self._create_s3_uploader(region=region) data_source = DataSource.lookup(self._db, data_source_name)", "uploader_class: (Optional) Custom class which will be used insted of", "\".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", ), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: \"thebooks\"},", "lookup in .buckets uploader.buckets[\"foo\"] = object() result = uploader.get_bucket(\"foo\") assert", "_representation(self): rep, ignore = create( self._db, Representation, url=\"http://bucket/books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, )", "= object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def _create_s3_uploader( self, client_class=None, uploader_class=None,", "was called with bucket covers-go, key here.png\" == cover_rep.mirror_url )", "used for initializing an external integration :type: Optional[Dict] :return: New", "# Assert assert result == expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with( \"get_object\", ExpiresIn=expected_expiration,", "= s3_uploader.split_url(url, unquote) # Assert assert result == expected_result def", "== expected_result @parameterized.expand( [ ( \"without_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\",", "an epub\", ) epub_rep = epub.resource.representation assert None == epub_rep.mirrored_at", "\"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\", \"SHORT\",", "url_transform: assert ( S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform ) assert result ==", "uploader = self._create_s3_uploader(region=region) data_source = DataSource.lookup(self._db, data_source_name) # Act result", "cover_url = \"http://s3.amazonaws.com/covers-go/here.png\" s3.mirror_one(cover.resource.representation, cover_url) s3.mirror_one(epub.resource.representation, book_url) [ [data1, bucket1,", "test_content_root(self, name, bucket, expected_result, region=None): # Arrange uploader = self._create_s3_uploader(region=region)", "\"https://s3.us-east-2.amazonaws.com/a-bucket/a-path\", \"us-east-2\", S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.us-east-3.amazonaws.com/a-path\", \"us-east-3\",", "epub_rep, cover_rep: assert (utc_now() - rep.mirrored_at).seconds < 10 def test_mirror_failure(self):", "\"https://{0}.s3.{1}.amazonaws.com/{2}\".format(bucket, region, filename) expected_url = url + \"?AWSAccessKeyId=KEY&Expires=1&Signature=S\" settings =", "DatabaseTest from ..util.datetime_helpers import datetime_utc, utc_now class S3UploaderTest(DatabaseTest): def _integration(self,", "called 
with # the right arguments def mock_final_mirror_url(bucket, key): return", "# Act result = s3_uploader.sign_url(url) # Assert assert result ==", "[ {\"ETag\": \"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ],", "with this class must be the same as its #", "== epub_rep.mirrored_at assert None == epub_rep.mirror_exception # An S3 credential", "] ) def test_book_url( self, name, buckets, identifier, expected_result, extension=None,", "S3 uploader :rtype: S3Uploader \"\"\" if settings and \"username\" not", "ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content=\"i'm an epub\",", "# An S3 credential failure is treated as a transient", "assert False == MockMultipartS3Upload.aborted assert None == rep.mirror_exception class FailingMultipartS3Upload(MockMultipartS3Upload):", "def test_svg_mirroring(self): edition, pool = self._edition(with_license_pool=True) original = self._url #", "== MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception", "\"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1,", "\"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dashed_region\", \"https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ),", "self._create_s3_uploader(region=region) # Act result = uploader.content_root(bucket) # Assert assert result", "self._add_settings_value( settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class: client_class =", "uploader_class=uploader_class, **settings ) self.minio_s3_client.create_bucket(Bucket=bucket_name) # Act book_url = s3_uploader.book_url(identifier, open_access=open_access)", "to build S3 keys from parts.\"\"\" parts = [\"Gutenberg\", b\"Gutenberg", "- rep.mirrored_at).seconds < 10 def test_mirror_failure(self): edition, pool = self._edition(with_license_pool=True)", "), ( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), (", "addressing_style: (Optional) S3 addressing style :type addressing_style: Optional[string] :param settings:", "), ( \"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", None,", "uploader.buckets[\"foo\"] == result @parameterized.expand( [ ( \"s3_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\",", "not in client_class.call_args_list[1].kwargs def test_custom_client_class(self): \"\"\"You can specify a client", "data_source = DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region,", "0, 0), \"https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), ),", "False, ), ] ) def test_split_url(self, name, url, expected_result, unquote=True):", "assert [ { \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", \"UploadId\": 1, \"MultipartUpload\":", 
"s3_uploader.book_url(identifier, open_access=open_access) s3_uploader.mirror_one(representation, book_url) # Assert response = self.minio_s3_client.list_objects(Bucket=bucket_name) assert", "\"SHORT\", \"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\",", "), ( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\", ), (", "b\"Gutenberg ID\", 1234, \"Die Flügelmaus+.epub\"] assert ( \"Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub\" == S3Uploader.key_join(parts)", "\"username\", \"password\"), ] ) def test_initialization(self, name, username, password): #", "assert Representation.PNG_MEDIA_TYPE == args1[\"ContentType\"] assert (utc_now() - cover_rep.mirrored_at).seconds < 10", "pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url) def test_upload_part(self): uploader = self._create_s3_uploader(MockS3Client)", "0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), ),", "registry, and it's # better if it's the same as", "ignore1], [data2, bucket2, key2, args2, ignore2], ] = s3.client.uploads #", "1\") upload.upload_part(\"Part 2\") upload.abort() assert [] == uploader.client.parts @pytest.mark.minio class", "with s3.multipart_upload( rep, rep.url, upload_class=MockMultipartS3Upload ) as upload: assert []", "s3_uploader = self._create_s3_uploader( uploader_class=uploader_class, **settings ) self.minio_s3_client.create_bucket(Bucket=bucket_name) # Act book_url", "S3. 
s3 = self._create_s3_uploader(MockS3Client) s3.mirror_one(hyperlink.resource.representation, self._url) [[data, bucket, key, args,", "{ S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"banana\", S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"bucket\", } buckets_plus_irrelevant_setting = dict(buckets) buckets_plus_irrelevant_setting[\"not-a-bucket-at-all\"]", "from parameterized import parameterized from ..mirror import MirrorUploader from ..model", "assert None == epub_rep.mirrored_at assert None == epub_rep.mirror_exception # Because", "\"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), ( \"explicit_s3_url_template_with_custom_region\", \"bucket\", \"the", "\"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dashed_region\",", "\"Key\": \"books.mrc\", }, ] == uploader.client.parts assert 3 == upload.part_number", "Kwargs used for initializing an external integration :type: Optional[Dict] :return:", "uploader = self._create_s3_uploader(region=region, **buckets) parameters = {\"identifier\": identifier, \"open_access\": open_access}", "( \"with_http_bucket_and_end_time_and_start_time\", \"http://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0,", "epub_rep.mirror_exception # An S3 credential failure is treated as a", "datetime_utc(2020, 1, 1, 0, 0, 0), ), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\",", "= client_class.call_args_list[0].kwargs[ \"aws_secret_access_key\" ] config = client_class.call_args_list[0].kwargs[\"config\"] assert service_name ==", "# Assert assert result == expected_result @parameterized.expand( [ ( \"s3_url\",", "url + \"?AWSAccessKeyId=KEY&Expires=1&Signature=S\" settings = expiration_settings if expiration_settings else {}", ") client_class = MagicMock() # Act S3Uploader(integration, client_class=client_class) # Assert", "assert service_name == \"s3\" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id", "\"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, \"us-east-3\", ), ] ) def test_cover_image_url( self, name,", "# Arrange region = \"us-east-1\" bucket = \"bucket\" filename =", "key here.epub\" == epub_rep.mirror_url ) assert ( \"final_mirror_url was called", "boto3 client inside S3Uploader\"\"\" @classmethod def setup_class(cls): \"\"\"Initializes the test", "expiration_settings, expected_expiration): # Arrange region = \"us-east-1\" bucket = \"bucket\"", "{ \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", \"UploadId\": 1, \"MultipartUpload\": { \"Parts\":", "build S3 keys from parts.\"\"\" parts = [\"Gutenberg\", b\"Gutenberg ID\",", "\"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub\", None, DataSource.UNGLUE_IT, ), ( \"with_custom_title\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\",", "Act result = uploader.marc_file_url(library, lane, end_time, start_time) # Assert assert", "== upload.parts assert True == MockMultipartS3Upload.completed assert False == MockMultipartS3Upload.aborted", "= \"https://books.com/a-book.epub\" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, 
# encoding: utf-8
import functools
import os
from urllib.parse import urlsplit

import boto3
import botocore
import pytest
from botocore.exceptions import BotoCoreError, ClientError
from mock import MagicMock
from parameterized import parameterized

from ..mirror import MirrorUploader
from ..model import (
    DataSource,
    ExternalIntegration,
    Hyperlink,
    Identifier,
    Representation,
    create,
)
from ..s3 import (
    MinIOUploader,
    MinIOUploaderConfiguration,
    MockS3Client,
    MultipartS3Upload,
    S3AddressingStyle,
    S3Uploader,
    S3UploaderConfiguration,
)
from ..testing import DatabaseTest
from ..util.datetime_helpers import datetime_utc, utc_now


class S3UploaderTest(DatabaseTest):
    def _integration(self, **settings):
        """Create and configure a simple S3 integration."""
        integration = self._external_integration(
            ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
        )
        integration.username = settings.get("username", "username")
        integration.password = settings.get("password", "password")
        return integration

    def _add_settings_value(self, settings, key, value):
        """Adds a value to a settings dictionary

        :param settings: Settings dictionary
        :type settings: Dict

        :param key: Key
        :type key: string

        :param value: Value
        :type value: Any

        :return: Updated settings dictionary
        :rtype: Dict
        """
        if value:
            if settings:
                settings[key] = value
            else:
                settings = {key: value}

        return settings

    def _create_s3_uploader(
        self,
        client_class=None,
        uploader_class=None,
        region=None,
        addressing_style=None,
        **settings
    ):
        """Creates a new instance of S3 uploader

        :param client_class: (Optional) Custom class to be used instead of
            boto3's client class
        :type client_class: Optional[Type]

        :param uploader_class: (Optional) Custom class which will be used
            instead of S3Uploader
        :type uploader_class: Optional[Type]

        :param region: (Optional) S3 region
        :type region: Optional[string]

        :param addressing_style: (Optional) S3 addressing style
        :type addressing_style: Optional[string]

        :param settings: Kwargs used for initializing an external integration
        :type settings: Optional[Dict]

        :return: New instance of S3 uploader
        :rtype: S3Uploader
        """
        settings = self._add_settings_value(
            settings, S3UploaderConfiguration.S3_REGION, region
        )
        settings = self._add_settings_value(
            settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style
        )
        integration = self._integration(**settings)
        uploader_class = uploader_class or S3Uploader

        return uploader_class(integration, client_class=client_class)
assert \"a", "= uploader.url(bucket, path) # Assert assert result == expected_result @parameterized.expand(", "\"Contents\" in response assert len(response[\"Contents\"]) == 1 [object] = response[\"Contents\"]", "args2, ignore2], ] = s3.client.uploads # Both representations have had", "string :param value: Value :type value: Any :return: Updated settings", "book_url) # Assert response = self.minio_s3_client.list_objects(Bucket=bucket_name) assert \"Contents\" in response", "the external # integration. assert S3Uploader.NAME == ExternalIntegration.S3 assert (", ".buckets uploader.buckets[\"foo\"] = object() result = uploader.get_bucket(\"foo\") assert uploader.buckets[\"foo\"] ==", "datetime_utc(2020, 1, 2, 0, 0, 0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1,", "( \"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\", \"/a-path\", \"https://a-bucket.com/a-path\", None, ), ] ) def", "settings = {\"username\": username, \"password\": password} integration = self._external_integration( ExternalIntegration.S3,", "test_url( self, name, bucket, path, expected_result, region=None, addressing_style=None ): #", "Params={\"Bucket\": bucket, \"Key\": filename}, ) class TestMultiPartS3Upload(S3UploaderTest): def _representation(self): rep,", "on the S3Uploader object. assert \"a transform\" == uploader.url_transform @parameterized.expand(", "DataSource.lookup(self._db, DataSource.UNGLUE_IT) # identifier = self._identifier(foreign_id=\"ABOOK\") # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit,", "( \"with_overdrive_data_source_and_scaled_size\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/\", 300, ), ( \"with_gutenberg_cover_generator_data_source_and_custom_region\", \"test-book-covers-s3-bucket\",", "assert isinstance(uploader.client, MockS3Client) def test_get_bucket(self): buckets = { S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"banana\",", "config = client_class.call_args_list[0].kwargs[\"config\"] assert service_name == \"s3\" assert region_name ==", "(\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"http_url_with_escaped_symbols\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename with spaces!.jpg\"),", "MinIOUploader, S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\", False, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ]", "DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ), ] ) def test_cover_image_root( self,", "\"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub\", ), ( \"with_custom_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\",", "integration.password = settings.get(\"password\", \"password\") return integration def _add_settings_value(self, settings, key,", "\"password\") return integration def _add_settings_value(self, settings, key, value): \"\"\"Adds a", "( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\", ), ( 
\"with_custom_data_source\",", "None, ), ( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ),", "False def upload_part(self, content): self.parts.append(content) def complete(self): MockMultipartS3Upload.completed = True", "b\"PNG\" not in data def test_multipart_upload(self): class MockMultipartS3Upload(MultipartS3Upload): completed =", "expected_result, region=None): # Arrange uploader = self._create_s3_uploader(region=region) # Act result", "test_upload_part(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader,", "from MinIO\"\"\" super(S3UploaderTest, self).teardown_method() response = self.minio_s3_client.list_buckets() for bucket in", "= self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region ) settings = self._add_settings_value( settings,", "\"open_access\": open_access} if extension: parameters[\"extension\"] = extension if title: parameters[\"title\"]", "\"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100, ), ] ) def test_sign_url(self, name,", "upload_class=MockMultipartS3Upload ) as upload: assert [] == upload.parts assert False", "\"filename\" url = \"https://{0}.s3.{1}.amazonaws.com/{2}\".format(bucket, region, filename) expected_url = url +", "# Arrange data_source = DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier) uploader", "uploader = self._create_s3_uploader(region=region) # Act result = uploader.content_root(bucket) # Assert", "the right arguments def mock_final_mirror_url(bucket, key): return \"final_mirror_url was called", "S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\", False, ), ( \"using_minio_uploader_and_open_access_bucket\", MinIOUploader, S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True,", "xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse cx=\"50\" cy=\"25\" rx=\"50\" ry=\"25\" style=\"fill:blue;\"/> </svg>\"\"\"", "upload.upload_part(\"Part 2\") upload.abort() assert [] == uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest):", "upload.completed assert False == upload.aborted upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") assert", "\"bucket\", \"the këy\", \"http://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTP, ), ( \"https_url_template\", \"bucket\", \"the", "Books\", ), ( \"with_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT,", "self._create_s3_uploader(region=region) if url_transform: uploader.url_transform = url_transform # Act result =", "False == MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted assert \"Error!\" ==", "), ( \"s3_path_style_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.us-east-2.amazonaws.com/a-bucket/a-path\", \"us-east-2\", S3AddressingStyle.PATH.value, ), (", "epub_rep = epub.resource.representation assert None == epub_rep.mirrored_at s3 = self._create_s3_uploader(client_class=MockS3Client)", "] ) def test_mirror( self, name, uploader_class, bucket_type, bucket_name, open_access,", "\"Lane\", datetime_utc(2020, 1, 1, 0, 0, 0), 
\"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), (", "setting. assert buckets == uploader.buckets # get_bucket just does a", "{S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", ), (", "\"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0),", "addressing_style ) integration = self._integration(**settings) uploader_class = uploader_class or S3Uploader", "self.parts = [] MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted = False def", "settings[key] = value else: settings = {key: value} return settings", "import functools import os from urllib.parse import urlsplit import boto3", "# A network failure is treated as a transient error.", ") as upload: assert [] == upload.parts assert False ==", "= self._integration() uploader = S3Uploader(integration, MockS3Client) assert isinstance(uploader.client, MockS3Client) def", "import ( DataSource, ExternalIntegration, Hyperlink, Identifier, Representation, create, ) from", "settings.get(\"username\", \"username\") integration.password = settings.get(\"password\", \"password\") return integration def _add_settings_value(self,", "\"/a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\",", "bucket, library_name, expected_result, region=None ): # Arrange uploader = self._create_s3_uploader(region=region)", "have had .mirror_url set and been # mirrored to those", "S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ( \"using_minio_uploader_and_protected_access_bucket\", MinIOUploader, S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\", False, {", ":param settings: Kwargs used for initializing an external integration :type:", "\"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\", None, None, \"On Books\", ), ( \"with_custom_extension_and_title_and_data_source\",", "\"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \"pdf\", ), ( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\",", "args1[\"ContentType\"] assert (utc_now() - cover_rep.mirrored_at).seconds < 10 assert b\"i'm an", "2\") upload.abort() assert [] == uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand(", "create( self._db, Representation, url=\"http://bucket/books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, ) return rep def test_init(self):", "be used insted of S3Uploader :type uploader_class: Optional[Type] :param region:", "assert [] == upload.parts assert False == upload.completed assert False", "it's the same as the name of the external #", "region=None, addressing_style=None ): # Arrange uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style", "self, name, bucket, library_name, expected_result, region=None ): # Arrange uploader", "\"/a-path\", \"https://a-bucket.com/a-path\", None, ), ] ) def test_url( self, name,", "def upload_part(self, content): self.parts.append(content) def complete(self): 
class TestS3Uploader(S3UploaderTest):
    def test_names(self):
        # The NAME associated with this class must be the same as its
        # key in the MirrorUploader implementation registry, and it's
        # better if it's the same as the name of the external
        # integration.
        assert S3Uploader.NAME == ExternalIntegration.S3
        assert (
            S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3]
        )

    def test_instantiation(self):
        integration = self._external_integration(
            ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL
        )
        integration.username = "your-access-key"
        integration.password = "<PASSWORD>"
        integration.setting(S3UploaderConfiguration.URL_TEMPLATE_KEY).value = "a transform"
        uploader = MirrorUploader.implementation(integration)
        assert True == isinstance(uploader, S3Uploader)

        # The URL_TEMPLATE_KEY setting becomes the .url_transform
        # attribute on the S3Uploader object.
        assert "a transform" == uploader.url_transform

    @parameterized.expand(
        [
            ("empty_credentials", None, None),
            ("empty_string_credentials", "", ""),
            ("non_empty_string_credentials", "username", "password"),
        ]
    )
    def test_initialization(self, name, username, password):
        # Arrange
        settings = {"username": username, "password": password}
        integration = self._external_integration(
            ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
        )
        client_class = MagicMock()

        # Act
        S3Uploader(integration, client_class=client_class)

        # Assert
        assert client_class.call_count == 2

        service_name = client_class.call_args_list[0].args[0]
        region_name = client_class.call_args_list[0].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[0].kwargs[
            "aws_secret_access_key"
        ]
        config = client_class.call_args_list[0].kwargs["config"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert (
            config.s3["addressing_style"]
            == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE
        )

        service_name = client_class.call_args_list[1].args[0]
        region_name = client_class.call_args_list[1].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[1].kwargs[
            "aws_secret_access_key"
        ]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert "config" not in client_class.call_args_list[1].kwargs

    def test_custom_client_class(self):
        """You can specify a client class to use instead of boto3.client."""
        integration = self._integration()
        uploader = S3Uploader(integration, MockS3Client)
        assert isinstance(uploader.client, MockS3Client)
    def test_get_bucket(self):
        buckets = {
            S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana",
            S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket",
        }
        buckets_plus_irrelevant_setting = dict(buckets)
        buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value"
        uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting)

        # This S3Uploader knows about the configured buckets.  It
        # wasn't informed of the irrelevant 'not-a-bucket-at-all'
        # setting.
        assert buckets == uploader.buckets

        # get_bucket just does a lookup in .buckets
        uploader.buckets["foo"] = object()
        result = uploader.get_bucket("foo")
        assert uploader.buckets["foo"] == result

    @parameterized.expand(
        [
            ("s3_url_with_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.amazonaws.com/a-path", None),
            ("s3_dummy_url_with_path_without_slash", "dummy", "dummy", "https://dummy.s3.amazonaws.com/dummy", None),
            ("s3_path_style_url_with_path_without_slash", "a-bucket", "a-path", "https://s3.amazonaws.com/a-bucket/a-path", None, S3AddressingStyle.PATH.value),
            ("s3_path_style_dummy_url_with_path_without_slash", "dummy", "dummy", "https://s3.amazonaws.com/dummy/dummy", None, S3AddressingStyle.PATH.value),
            ("s3_url_with_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.amazonaws.com/a-path", None),
            ("s3_path_style_url_with_path_with_slash", "a-bucket", "/a-path", "https://s3.amazonaws.com/a-bucket/a-path", None, S3AddressingStyle.PATH.value),
            ("s3_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.us-east-2.amazonaws.com/a-path", "us-east-2"),
            ("s3_path_style_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://s3.us-east-2.amazonaws.com/a-bucket/a-path", "us-east-2", S3AddressingStyle.PATH.value),
            ("s3_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.us-east-3.amazonaws.com/a-path", "us-east-3"),
            ("s3_path_style_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://s3.us-east-3.amazonaws.com/a-bucket/a-path", "us-east-3", S3AddressingStyle.PATH.value),
            ("custom_http_url_and_path_with_slash", "http://a-bucket.com/", "/a-path", "http://a-bucket.com/a-path", None),
            ("custom_http_url_and_path_without_slash", "http://a-bucket.com/", "a-path", "http://a-bucket.com/a-path", None),
            ("custom_https_url_and_path_with_slash", "https://a-bucket.com/", "/a-path", "https://a-bucket.com/a-path", None),
            ("custom_https_url_and_path_without_slash", "https://a-bucket.com/", "a-path", "https://a-bucket.com/a-path", None),
        ]
    )
    def test_url(self, name, bucket, path, expected_result, region=None, addressing_style=None):
        # Arrange
        uploader = self._create_s3_uploader(
            region=region, addressing_style=addressing_style
        )

        # Act
        result = uploader.url(bucket, path)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("implicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key"),
            ("implicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", None, "us-east-2"),
            ("explicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT),
            ("explicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, "us-east-2"),
            ("http_url_template", "bucket", "the këy", "http://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTP),
            ("https_url_template", "bucket", "the këy", "https://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTPS),
        ]
    )
    def test_final_mirror_url(self, name, bucket, key, expected_result, url_transform=None, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        if url_transform:
            uploader.url_transform = url_transform

        # Act
        result = uploader.final_mirror_url(bucket, key)

        # Assert
        if not url_transform:
            assert (
                S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform
            )
        assert result == expected_result

    def test_key_join(self):
        """Test the code used to build S3 keys from parts."""
        parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"]
        assert (
            "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub"
            == S3Uploader.key_join(parts)
        )
    @parameterized.expand(
        [
            ("with_gutenberg_cover_generator_data_source", "test-book-covers-s3-bucket", DataSource.GUTENBERG_COVER_GENERATOR, "https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/"),
            ("with_overdrive_data_source", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/"),
            ("with_overdrive_data_source_and_scaled_size", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/", 300),
            ("with_gutenberg_cover_generator_data_source_and_custom_region", "test-book-covers-s3-bucket", DataSource.GUTENBERG_COVER_GENERATOR, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/", None, "us-east-3"),
            ("with_overdrive_data_source_and_custom_region", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/", None, "us-east-3"),
            ("with_overdrive_data_source_and_scaled_size_and_custom_region", "test-book-covers-s3-bucket", DataSource.OVERDRIVE, "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/", 300, "us-east-3"),
        ]
    )
    def test_cover_image_root(self, name, bucket, data_source_name, expected_result, scaled_size=None, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        data_source = DataSource.lookup(self._db, data_source_name)

        # Act
        result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("with_default_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.amazonaws.com/"),
            ("with_custom_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/", "us-east-3"),
        ]
    )
    def test_content_root(self, name, bucket, expected_result, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)

        # Act
        result = uploader.content_root(bucket)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("s3_url", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/"),
            ("s3_url_with_custom_region", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/", "us-east-2"),
            ("custom_http_url", "http://my-feed/", "SHORT", "http://my-feed/SHORT/"),
            ("custom_https_url", "https://my-feed/", "SHORT", "https://my-feed/SHORT/"),
        ]
    )
    def test_marc_file_root(self, name, bucket, library_name, expected_result, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        library = self._library(short_name=library_name)

        # Act
        result = uploader.marc_file_root(bucket, library)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("with_identifier", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub"),
            ("with_custom_extension", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", "pdf"),
            ("with_custom_dotted_extension", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", ".pdf"),
            ("with_custom_data_source", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub", None, DataSource.UNGLUE_IT),
            ("with_custom_title", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub", None, None, "On Books"),
            ("with_custom_extension_and_title_and_data_source", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books"),
            ("with_custom_extension_and_title_and_data_source_and_region", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books", "us-east-3"),
            ("with_protected_access", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"}, "ABOOK", "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", ".pdf", DataSource.UNGLUE_IT, "On Books", "us-east-3", False),
        ]
    )
    def test_book_url(self, name, buckets, identifier, expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True):
        # Arrange
        identifier = self._identifier(foreign_id=identifier)
        uploader = self._create_s3_uploader(region=region, **buckets)
        parameters = {"identifier": identifier, "open_access": open_access}

        if extension:
            parameters["extension"] = extension
        if title:
            parameters["title"] = title
        if data_source_name:
            data_source = DataSource.lookup(self._db, data_source_name)
            parameters["data_source"] = data_source

        # Act
        result = uploader.book_url(**parameters)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("without_scaled_size", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename"),
            ("without_scaled_size_and_custom_region", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", None, "us-east-3"),
            ("with_scaled_size", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", 601),
            ("with_scaled_size_and_custom_region", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, DataSource.UNGLUE_IT, "ABOOK", "filename", "https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", 601, "us-east-3"),
        ]
    )
    def test_cover_image_url(self, name, buckets, data_source_name, identifier, filename, expected_result, scaled_size=None, region=None):
        # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'}
        # uploader = self._uploader(**buckets)
        # m = uploader.cover_image_url
        #
        # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT)
        # identifier = self._identifier(foreign_id="ABOOK")
        # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename',
        #     m(unglueit, identifier, "filename", scaled_size=601))

        # Arrange
        data_source = DataSource.lookup(self._db, data_source_name)
        identifier = self._identifier(foreign_id=identifier)
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.cover_image_url(
            data_source, identifier, filename, scaled_size=scaled_size
        )

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("with_s3_bucket_and_end_time", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 1, 0, 0, 0), "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc"),
            ("with_s3_bucket_and_end_time_and_start_time", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0)),
            ("with_s3_bucket_and_end_time_and_start_time_and_custom_region", "marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0), "us-east-2"),
            ("with_http_bucket_and_end_time_and_start_time", "http://marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0)),
            ("with_https_bucket_and_end_time_and_start_time", "https://marc", "SHORT", "Lane", datetime_utc(2020, 1, 2, 0, 0, 0), "https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", datetime_utc(2020, 1, 1, 0, 0, 0)),
        ]
    )
    def test_marc_file_url(self, name, bucket, library_name, lane_name, end_time, expected_result, start_time=None, region=None):
        # Arrange
        library = self._library(short_name=library_name)
        lane = self._lane(display_name=lane_name)
        buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket}
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.marc_file_url(library, lane, end_time, start_time)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("s3_path_style_request_without_region", "https://s3.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_path_style_request_with_region", "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_global_endpoint", "https://bucket.s3.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dashed_region", "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dotted_region", "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("http_url", "http://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg")),
            ("https_url", "https://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg")),
            ("http_url_with_escaped_symbols", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename with spaces!.jpg")),
            ("http_url_with_escaped_symbols_but_unquote_set_to_false", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), False),
        ]
    )
    def test_split_url(self, name, url, expected_result, unquote=True):
        # Arrange
        s3_uploader = self._create_s3_uploader()

        # Act
        result = s3_uploader.split_url(url, unquote)

        # Assert
        assert result == expected_result
    def test_mirror_one(self):
        edition, pool = self._edition(with_license_pool=True)
        original_cover_location = "http://example.com/a-cover.png"
        content = open(self.sample_cover_path("test-book-cover.png"), "rb").read()
        cover, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original_cover_location,
            edition.data_source,
            Representation.PNG_MEDIA_TYPE,
            content=content,
        )
        cover_rep = cover.resource.representation
        assert None == cover_rep.mirrored_at

        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation
        assert None == epub_rep.mirrored_at

        s3 = self._create_s3_uploader(client_class=MockS3Client)

        # Mock final_mirror_url so we can verify that it's called with
        # the right arguments
        def mock_final_mirror_url(bucket, key):
            return "final_mirror_url was called with bucket %s, key %s" % (bucket, key)

        s3.final_mirror_url = mock_final_mirror_url

        book_url = "http://books-go/here.epub"
        cover_url = "http://s3.amazonaws.com/covers-go/here.png"
        s3.mirror_one(cover.resource.representation, cover_url)
        s3.mirror_one(epub.resource.representation, book_url)
        [
            [data1, bucket1, key1, args1, ignore1],
            [data2, bucket2, key2, args2, ignore2],
        ] = s3.client.uploads

        # Both representations have had .mirror_url set and been
        # mirrored to those URLs.
        assert data1.startswith(b"\x89")
        assert "covers-go" == bucket1
        assert "here.png" == key1
        assert Representation.PNG_MEDIA_TYPE == args1["ContentType"]
        assert (utc_now() - cover_rep.mirrored_at).seconds < 10

        assert b"i'm an epub" == data2
        assert "books-go" == bucket2
        assert "here.epub" == key2
        assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"]
        assert (utc_now() - epub_rep.mirrored_at).seconds < 10

        # In both cases, mirror_url was set to the result of final_mirror_url.
        assert (
            "final_mirror_url was called with bucket books-go, key here.epub"
            == epub_rep.mirror_url
        )
        assert (
            "final_mirror_url was called with bucket covers-go, key here.png"
            == cover_rep.mirror_url
        )

        # mirrored_at was set when the representation was 'mirrored'.
        for rep in epub_rep, cover_rep:
            assert (utc_now() - rep.mirrored_at).seconds < 10

    def test_mirror_failure(self):
        edition, pool = self._edition(with_license_pool=True)
        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation

        uploader = self._create_s3_uploader(MockS3Client)

        # A network failure is treated as a transient error.
        uploader.client.fail_with = BotoCoreError()
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # A bad-credentials error is also treated as a transient error.
        response = dict(
            Error=dict(
                Code=401,
                Message="Bad credentials",
            )
        )
        uploader.client.fail_with = ClientError(response, "SomeOperation")
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # Because the file was not successfully uploaded,
        # final_mirror_url was never called and mirror_url was
        # not set.
        assert None == epub_rep.mirror_url

        # A bug in the code is not treated as a transient error --
        # the exception propagates through.
        uploader.client.fail_with = Exception("crash!")
        pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url)

    def test_svg_mirroring(self):
        edition, pool = self._edition(with_license_pool=True)
        original = self._url

        # Create an SVG cover for the book.
        svg = """<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="50">
    <ellipse cx="50" cy="25" rx="50" ry="25" style="fill:blue;"/>
</svg>"""
        hyperlink, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original,
            edition.data_source,
            Representation.SVG_MEDIA_TYPE,
            content=svg,
        )

        # 'Upload' it to S3.
        s3 = self._create_s3_uploader(MockS3Client)
        s3.mirror_one(hyperlink.resource.representation, self._url)
        [[data, bucket, key, args, ignore]] = s3.client.uploads

        assert Representation.SVG_MEDIA_TYPE == args["ContentType"]
        assert b"svg" in data
        assert b"PNG" not in data
"\"us-east-2\", ), ( \"with_http_bucket_and_end_time_and_start_time\", \"http://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2,", "if expiration_settings else {} s3_uploader = self._create_s3_uploader(region=region, **settings) s3_uploader.split_url =", "m = uploader.cover_image_url # # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) #", "# Act result = uploader.book_url(**parameters) # Assert assert result ==", "was not set. assert None == epub_rep.mirror_url # A bug", "Key :type key: string :param value: Value :type value: Any", "response = self.minio_s3_client.list_objects(Bucket=bucket_name) assert \"Contents\" in response assert len(response[\"Contents\"]) ==", "# Act S3Uploader(integration, client_class=client_class) # Assert assert client_class.call_count == 2", "uploader = self._create_s3_uploader(region=region, **buckets) # Act result = uploader.marc_file_url(library, lane,", "\"us-east-3\", S3AddressingStyle.PATH.value, ), ( \"custom_http_url_and_path_without_slash\", \"http://a-bucket.com/\", \"a-path\", \"http://a-bucket.com/a-path\", None, ),", "\"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), ), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\",", "media_type=Representation.MARC_MEDIA_TYPE, ) s3 = self._create_s3_uploader(MockS3Client) # Successful upload with s3.multipart_upload(", "= {key: value} return settings def _create_s3_uploader( self, client_class=None, uploader_class=None,", "addressing style :type addressing_style: Optional[string] :param settings: Kwargs used for", "in settings: self._add_settings_value( settings, \"username\", self.SIMPLIFIED_TEST_MINIO_USER ) if settings and", "= None def __init__(self, uploader, representation, mirror_to): self.parts = []", "\"books-go\" == bucket2 assert \"here.epub\" == key2 assert Representation.EPUB_MEDIA_TYPE ==", "# This S3Uploader knows about the configured buckets. 
It #", "= None \"\"\"boto3 client connected to locally running MinIO instance\"\"\"", "assert ( \"final_mirror_url was called with bucket books-go, key here.epub\"", "False MockMultipartS3Upload.aborted = False def upload_part(self, content): self.parts.append(content) def complete(self):", "= \"1234567890\" book_content = \"1234567890\" identifier = Identifier(type=Identifier.ISBN, identifier=book_title) representation", "assert (utc_now() - cover_rep.mirrored_at).seconds < 10 assert b\"i'm an epub\"", "book_title = \"1234567890\" book_content = \"1234567890\" identifier = Identifier(type=Identifier.ISBN, identifier=book_title)", "None, \"us-east-2\", ), ( \"explicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT,", "\"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1,", "Act result = s3_uploader.split_url(url, unquote) # Assert assert result ==", "assert [] == upload.parts assert 1 == upload.upload.get(\"UploadId\") uploader.client.fail_with =", "import MagicMock from parameterized import parameterized from ..mirror import MirrorUploader", "DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", None, \"us-east-3\", ), ( \"with_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY:", "mock import MagicMock from parameterized import parameterized from ..mirror import", "300, ), ( \"with_gutenberg_cover_generator_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/\", None, \"us-east-3\", ),", "\"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename+with+spaces%21.jpg\"), False, ), ] ) def test_split_url(self, name,", "= uploader_class or S3Uploader return uploader_class(integration, client_class=client_class) class S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "[data1, bucket1, key1, args1, ignore1], [data2, bucket2, key2, args2, ignore2],", "region, addressing_style, **settings ) class TestS3Uploader(S3UploaderTest): def test_names(self): # The", "= { S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"banana\", S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"bucket\", } buckets_plus_irrelevant_setting = dict(buckets)", "up with MinIO credentials\"\"\" super(S3UploaderIntegrationTest, cls).setup_class() cls.minio_s3_client = boto3.client( \"s3\",", "content): raise Exception(\"Error!\") # Failed during upload with s3.multipart_upload( rep,", "_add_settings_value(self, settings, key, value): \"\"\"Adds a value to settings dictionary", "or S3Uploader return uploader_class(integration, client_class=client_class) class S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get(", "with bucket books-go, key here.epub\" == epub_rep.mirror_url ) assert (", "if it's the same as the name of the external", "data_source = DataSource.lookup(self._db, data_source_name) # Act result = uploader.cover_image_root(bucket, data_source,", "return \"final_mirror_url was called with bucket %s, key %s\" %", ") ) uploader.client.fail_with = ClientError(response, \"SomeOperation\") uploader.mirror_one(epub_rep, self._url) assert None", 
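
# A minimal sketch of the real S3 multipart protocol that MultipartS3Upload
# wraps and MockS3Client simulates above: boto3's create_multipart_upload /
# upload_part / complete_multipart_upload calls exchange the same
# UploadId / PartNumber / ETag structures asserted on in the tests. The
# bucket, key, and part bodies are placeholders, and real S3 requires parts
# of at least 5 MB except the last one.
def _sketch_boto3_multipart_flow():  # pragma: no cover
    s3 = boto3.client("s3")  # assumes credentials come from the environment

    # 1. Open the multipart upload; S3 hands back an UploadId.
    upload = s3.create_multipart_upload(Bucket="bucket", Key="books.mrc")
    upload_id = upload["UploadId"]

    parts = []
    for number, body in enumerate([b"Part 1", b"Part 2"], start=1):
        # 2. Upload each part; S3 returns an ETag per part.
        result = s3.upload_part(
            Bucket="bucket",
            Key="books.mrc",
            UploadId=upload_id,
            PartNumber=number,
            Body=body,
        )
        parts.append({"ETag": result["ETag"], "PartNumber": number})

    # 3. Commit with the accumulated part list -- the same shape
    #    MockS3Client records under "MultipartUpload" in .uploads.
    s3.complete_multipart_upload(
        Bucket="bucket",
        Key="books.mrc",
        UploadId=upload_id,
        MultipartUpload={"Parts": parts},
    )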
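
# A minimal sketch of the presigned-URL call that test_sign_url stubs out
# with MagicMock: generate_presigned_url is a real boto3 client method that
# returns a time-limited GET URL for a private object. Bucket and key here
# are placeholders.
def _sketch_presigned_url():  # pragma: no cover
    s3 = boto3.client("s3")

    return s3.generate_presigned_url(
        "get_object",
        Params={"Bucket": "bucket", "Key": "filename"},
        ExpiresIn=100,  # seconds, mirroring S3_PRESIGNED_URL_EXPIRATION above
    )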
"test_marc_file_url( self, name, bucket, library_name, lane_name, end_time, expected_result, start_time=None, region=None,", ":param addressing_style: (Optional) S3 addressing style :type addressing_style: Optional[string] :param", "\"http_url_with_escaped_symbols_but_unquote_set_to_false\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename+with+spaces%21.jpg\"), False, ), ] ) def test_split_url(self,", "test_sign_url(self, name, expiration_settings, expected_expiration): # Arrange region = \"us-east-1\" bucket", "treated as a transient error. response = dict( Error=dict( Code=401,", "\"https://s3.us-east-3.amazonaws.com/a-bucket/a-path\", \"us-east-3\", S3AddressingStyle.PATH.value, ), ( \"custom_http_url_and_path_without_slash\", \"http://a-bucket.com/\", \"a-path\", \"http://a-bucket.com/a-path\", None,", "0, 0, 0), ), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020,", "\"PartNumber\": 1, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", }, { \"Body\": \"Part", "\"s3_url_with_custom_region\", \"test-marc-s3-bucket\", \"SHORT\", \"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"),", "aws_access_key_id = client_class.call_args_list[1].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[1].kwargs[ \"aws_secret_access_key\" ] assert service_name", ":type value: Any :return: Updated settings dictionary :rtype: Dict \"\"\"", "removing all the buckets from MinIO\"\"\" super(S3UploaderTest, self).teardown_method() response =", "identifier=book_title) representation = Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets = {", "data1.startswith(b\"\\x89\") assert \"covers-go\" == bucket1 assert \"here.png\" == key1 assert", "boto3.client.\"\"\" integration = self._integration() uploader = S3Uploader(integration, MockS3Client) assert isinstance(uploader.client,", "0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), ( \"with_s3_bucket_and_end_time_and_start_time\", \"marc\", \"SHORT\", \"Lane\",", "\"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\",", "def test_url( self, name, bucket, path, expected_result, region=None, addressing_style=None ):", "\"http://books-go/here.epub\" cover_url = \"http://s3.amazonaws.com/covers-go/here.png\" s3.mirror_one(cover.resource.representation, cover_url) s3.mirror_one(epub.resource.representation, book_url) [ [data1,", "= os.environ.get( \"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER = os.environ.get( \"SIMPLIFIED_TEST_MINIO_USER\", \"minioadmin\"", "\"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ), ] ) def", "= self._integration(**settings) uploader_class = uploader_class or S3Uploader return uploader_class(integration, client_class=client_class)", "uploader.url_transform ) assert result == expected_result def test_key_join(self): \"\"\"Test the", "), ( \"explicit_s3_url_template\", \"bucket\", \"the key\", 
\"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), (", "\"test-marc-s3-bucket\", \"SHORT\", \"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\",", "for rep in epub_rep, cover_rep: assert (utc_now() - rep.mirrored_at).seconds <", "client_class: Optional[Type] :param: uploader_class: (Optional) Custom class which will be", "and been # mirrored to those URLs. assert data1.startswith(b\"\\x89\") assert", "== expected_result @parameterized.expand( [ ( \"with_s3_bucket_and_end_time\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020,", "identifier, \"open_access\": open_access} if extension: parameters[\"extension\"] = extension if title:", "# Both representations have had .mirror_url set and been #", "assert len(response[\"Contents\"]) == 1 [object] = response[\"Contents\"] assert object[\"Key\"] ==", "\"etag\", \"PartNumber\": 2}, ] == upload.parts uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception,", "\"\", \"\"), (\"non_empty_string_credentials\", \"username\", \"password\"), ] ) def test_initialization(self, name,", "settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class: client_class = self.s3_client_class", "None, ), ] ) def test_url( self, name, bucket, path,", "rep in epub_rep, cover_rep: assert (utc_now() - rep.mirrored_at).seconds < 10", "uploader.cover_image_url # # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) # identifier =", "1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse cx=\"50\" cy=\"25\" rx=\"50\"", "1, 1, 0, 0, 0), ), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\", \"SHORT\",", "from parts.\"\"\" parts = [\"Gutenberg\", b\"Gutenberg ID\", 1234, \"Die Flügelmaus+.epub\"]", "\"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\",", "region :type region: Optional[string] :param addressing_style: (Optional) S3 addressing style", "bucket1, key1, args1, ignore1], [data2, bucket2, key2, args2, ignore2], ]", "= \"https://{0}.s3.{1}.amazonaws.com/{2}\".format(bucket, region, filename) expected_url = url + \"?AWSAccessKeyId=KEY&Expires=1&Signature=S\" settings", "treated as a transient error -- # the exception propagates", "\"with_overdrive_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\", None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\",", "( \"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\",", "\"using_minio_uploader_and_open_access_bucket\", MinIOUploader, S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ),", "= MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") assert [", "buckets = { 
class TestS3Uploader(S3UploaderTest):
    def test_names(self):
        # The NAME associated with this class must be the same as its
        # key in the MirrorUploader implementation registry, and it's
        # better if it's the same as the name of the external
        # integration.
        assert S3Uploader.NAME == ExternalIntegration.S3
        assert S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3]

    def test_instantiation(self):
        integration = self._external_integration(
            ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL
        )
        integration.username = "your-access-key"
        integration.password = "<PASSWORD>"
        integration.setting(S3UploaderConfiguration.URL_TEMPLATE_KEY).value = "a transform"
        uploader = MirrorUploader.implementation(integration)
        assert True == isinstance(uploader, S3Uploader)

        # The URL_TEMPLATE_KEY setting becomes the .url_transform
        # attribute on the S3Uploader object.
        assert "a transform" == uploader.url_transform

    @parameterized.expand(
        [
            ("empty_credentials", "", ""),
            ("non_empty_string_credentials", "username", "password"),
        ]
    )
    def test_initialization(self, name, username, password):
        # Arrange
        settings = {"username": username, "password": password}
        integration = self._external_integration(
            ExternalIntegration.S3,
            goal=ExternalIntegration.STORAGE_GOAL,
            settings=settings,
        )
        client_class = MagicMock()

        # Act
        S3Uploader(integration, client_class=client_class)

        # Assert
        assert client_class.call_count == 2

        service_name = client_class.call_args_list[0].args[0]
        region_name = client_class.call_args_list[0].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[0].kwargs["aws_secret_access_key"]
        config = client_class.call_args_list[0].kwargs["config"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert config.s3["addressing_style"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE

        service_name = client_class.call_args_list[1].args[0]
        region_name = client_class.call_args_list[1].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[1].kwargs["aws_secret_access_key"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert "config" not in client_class.call_args_list[1].kwargs

    def test_custom_client_class(self):
        """You can specify a client class to use instead of boto3.client."""
        integration = self._integration()
        uploader = S3Uploader(integration, MockS3Client)
        assert isinstance(uploader.client, MockS3Client)

    def test_get_bucket(self):
        buckets = {
            S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana",
            S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket",
        }
        buckets_plus_irrelevant_setting = dict(buckets)
        buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value"
        uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting)

        # This S3Uploader knows about the configured buckets. It
        # wasn't informed of the irrelevant 'not-a-bucket-at-all'
        # setting.
        assert buckets == uploader.buckets

        # get_bucket() is a simple lookup in .buckets.
        uploader.buckets["foo"] = object()
        result = uploader.get_bucket("foo")
        assert uploader.buckets["foo"] == result
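
    # Summary (derived from the parameterized cases below, not from separate
    # documentation): given a (bucket, key) pair, the URL templates map to
    # public URLs roughly as
    #
    #     URL_TEMPLATE_DEFAULT -> https://<bucket>.s3.amazonaws.com/<quoted key>
    #     URL_TEMPLATE_HTTP    -> http://<bucket>/<quoted key>
    #     URL_TEMPLATE_HTTPS   -> https://<bucket>/<quoted key>
    #
    # e.g. ("bucket", "the këy") under URL_TEMPLATE_HTTP becomes
    # "http://bucket/the%20k%C3%ABy".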
    @parameterized.expand(
        [
            ("s3_url_with_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.amazonaws.com/a-path", None),
            ("s3_dummy_url_with_path_without_slash", "dummy", "dummy", "https://dummy.s3.amazonaws.com/dummy", None),
            ("s3_path_style_dummy_url_with_path_without_slash", "dummy", "dummy", "https://s3.amazonaws.com/dummy/dummy", None, S3AddressingStyle.PATH.value),
            ("s3_url_with_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.amazonaws.com/a-path", None),
            ("s3_path_style_url_with_path_with_slash", "a-bucket", "/a-path", "https://s3.amazonaws.com/a-bucket/a-path", None, S3AddressingStyle.PATH.value),
            ("s3_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://a-bucket.s3.us-east-2.amazonaws.com/a-path", "us-east-2"),
            ("s3_path_style_url_with_custom_region_and_path_without_slash", "a-bucket", "a-path", "https://s3.us-east-2.amazonaws.com/a-bucket/a-path", "us-east-2", S3AddressingStyle.PATH.value),
            ("s3_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://a-bucket.s3.us-east-3.amazonaws.com/a-path", "us-east-3"),
            ("s3_path_style_url_with_custom_region_and_path_with_slash", "a-bucket", "/a-path", "https://s3.us-east-3.amazonaws.com/a-bucket/a-path", "us-east-3", S3AddressingStyle.PATH.value),
            ("custom_http_url_and_path_without_slash", "http://a-bucket.com/", "a-path", "http://a-bucket.com/a-path", None),
            ("custom_http_url_and_path_with_slash", "http://a-bucket.com/", "/a-path", "http://a-bucket.com/a-path", None),
            ("custom_https_url_and_path_without_slash", "https://a-bucket.com/", "a-path", "https://a-bucket.com/a-path", None),
            ("custom_https_url_and_path_with_slash", "https://a-bucket.com/", "/a-path", "https://a-bucket.com/a-path", None),
        ]
    )
    def test_url(self, name, bucket, path, expected_result, region=None, addressing_style=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region, addressing_style=addressing_style)

        # Act
        result = uploader.url(bucket, path)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("implicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key"),
            ("implicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", None, "us-east-2"),
            ("explicit_s3_url_template", "bucket", "the key", "https://bucket.s3.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT),
            ("explicit_s3_url_template_with_custom_region", "bucket", "the key", "https://bucket.s3.us-east-2.amazonaws.com/the%20key", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, "us-east-2"),
            ("http_url_template", "bucket", "the këy", "http://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTP),
            ("https_url_template", "bucket", "the këy", "https://bucket/the%20k%C3%ABy", S3UploaderConfiguration.URL_TEMPLATE_HTTPS),
        ]
    )
    def test_final_mirror_url(self, name, bucket, key, expected_result, url_transform=None, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        if url_transform:
            uploader.url_transform = url_transform

        # Act
        result = uploader.final_mirror_url(bucket, key)

        # Assert
        assert result == expected_result

    def test_key_join(self):
        """Test the code used to build S3 keys from parts."""
        parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"]
        assert (
            "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub"
            == S3Uploader.key_join(parts)
        )

    @parameterized.expand(
        [
            ("s3_path_style_request_without_region", "https://s3.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_path_style_request_with_region", "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_global_endpoint", "https://bucket.s3.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dashed_region", "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dotted_region", "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("http_url", "http://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg")),
            ("http_url_with_escaped_symbols", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename with spaces!.jpg")),
            ("http_url_with_escaped_symbols_but_unquote_set_to_false", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), False),
        ]
    )
    def test_split_url(self, name, url, expected_result, unquote=True):
        # Arrange
        s3_uploader = self._create_s3_uploader()

        # Act
        result = s3_uploader.split_url(url, unquote)

        # Assert
        assert result == expected_result
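
    # A minimal sketch (an assumption about the implementation, inferred from
    # test_key_join above) of the quoting behaviour being exercised: each part
    # is stringified (bytes are decoded as UTF-8 first), percent-encoded, and
    # joined with "/". `example_key_join` is a name invented for this sketch.
    @staticmethod
    def example_key_join(parts):
        from urllib.parse import quote

        keys = []
        for part in parts:
            if isinstance(part, bytes):
                part = part.decode("utf-8")
            # quote() keeps "/" in its safe set by default, which is fine here
            # because individual parts never contain slashes.
            keys.append(quote(str(part)))
        return "/".join(keys)

    # example_key_join(["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"])
    # evaluates to "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub".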
    @parameterized.expand(
        [
            ("with_default_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.amazonaws.com/"),
            ("with_custom_region", "test-open-access-s3-bucket", "https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/", "us-east-3"),
        ]
    )
    def test_content_root(self, name, bucket, expected_result, region=None):
        # Arrange
        buckets = {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: bucket}
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.content_root(bucket)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            ("s3_url", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/"),
            ("s3_url_with_custom_region", "test-marc-s3-bucket", "SHORT", "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/", "us-east-2"),
            ("custom_http_url", "http://my-feed/", "SHORT", "http://my-feed/SHORT/"),
            ("custom_https_url", "https://my-feed/", "SHORT", "https://my-feed/SHORT/"),
        ]
    )
    def test_marc_file_root(self, name, bucket, library_name, expected_result, region=None):
        # Arrange
        buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket}
        uploader = self._create_s3_uploader(region=region, **buckets)
        library = self._library(short_name=library_name)

        # Act
        result = uploader.marc_file_root(bucket, library)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "with_s3_bucket_and_end_time",
                "marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 1, 0, 0, 0),
                "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc",
            ),
            (
                "with_s3_bucket_and_end_time_and_start_time",
                "marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
            ),
            (
                "with_s3_bucket_and_end_time_and_start_time_and_custom_region",
                "marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
                "us-east-2",
            ),
            (
                "with_http_bucket_and_end_time_and_start_time",
                "http://marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
            ),
            (
                "with_https_bucket_and_end_time_and_start_time",
                "https://marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
            ),
        ]
    )
    def test_marc_file_url(
        self, name, bucket, library_name, lane_name, end_time, expected_result, start_time=None, region=None
    ):
        # Arrange
        library = self._library(short_name=library_name)
        lane = self._lane(display_name=lane_name)
        buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket}
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.marc_file_url(library, lane, end_time, start_time)

        # Assert
        assert result == expected_result
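
    # The MARC file keys asserted above follow the layout (read off the
    # expected URLs, not from separate documentation):
    #
    #     <library short name>/<start time>-<end time>/<lane display name>.mrc
    #
    # with the timestamp segment percent-encoded, e.g.
    # "SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc".
    # When no start time is given, the segment is just the encoded end time.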
\"s3_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None, ),", "] ) def test_content_root(self, name, bucket, expected_result, region=None): # Arrange", "( \"with_gutenberg_cover_generator_data_source\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/\", ), ( \"with_overdrive_data_source\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE,", "): # identifier = self._identifier(foreign_id=\"ABOOK\") # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY :", "# Arrange uploader = self._create_s3_uploader(region=region) if url_transform: uploader.url_transform = url_transform", "): # Arrange uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style ) #", "\"Key\": \"books.mrc\", }, { \"Body\": \"Part 2\", \"UploadId\": 1, \"PartNumber\":", "\"password\": password} integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL, settings=settings, ) client_class", "} buckets_plus_irrelevant_setting = dict(buckets) buckets_plus_irrelevant_setting[\"not-a-bucket-at-all\"] = \"value\" uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting)", "result = s3_uploader.split_url(url, unquote) # Assert assert result == expected_result", "1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ], }, } ] ==", "all the buckets from MinIO\"\"\" super(S3UploaderTest, self).teardown_method() response = self.minio_s3_client.list_buckets()", "assert result == expected_result @parameterized.expand( [ ( \"with_default_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\",", "= title if data_source_name: data_source = DataSource.lookup(self._db, DataSource.UNGLUE_IT) parameters[\"data_source\"] =", "601, ), ( \"with_scaled_size_and_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\",", "( \"default_expiration_parameter\", None, int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), ), ( \"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100,", "\"final_mirror_url was called with bucket covers-go, key here.png\" == cover_rep.mirror_url", "Settings dictionary :type settings: Dict :param key: Key :type key:", "so we can verify that it's called with # the", "None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\", \"us-east-2\", ),", "= uploader.cover_image_url # # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) # identifier", "= pool.add_link( Hyperlink.IMAGE, original_cover_location, edition.data_source, Representation.PNG_MEDIA_TYPE, content=content, ) cover_rep =", "\"http://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\",", "if data_source_name: data_source = DataSource.lookup(self._db, DataSource.UNGLUE_IT) parameters[\"data_source\"] = data_source #", "return integration def _add_settings_value(self, settings, key, value): \"\"\"Adds a value", "assert S3Uploader.NAME == ExternalIntegration.S3 assert ( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] 
)", "library = self._library(short_name=library_name) lane = self._lane(display_name=lane_name) buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket}", "url, expected_result, unquote=True): # Arrange s3_uploader = self._create_s3_uploader() # Act", "key\", \"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), ( \"explicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\",", "ignore = pool.add_link( Hyperlink.IMAGE, original_cover_location, edition.data_source, Representation.PNG_MEDIA_TYPE, content=content, ) cover_rep", "self._create_s3_uploader(client_class=MockS3Client) # Mock final_mirror_url so we can verify that it's", "expected_result, url_transform=None, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) if", "\"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\", \"SHORT\", \"https://my-feed/SHORT/\"), ] ) def test_marc_file_root(", "\"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", ), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\",", "during upload with s3.multipart_upload( rep, rep.url, upload_class=FailingMultipartS3Upload ) as upload:", "for the book. svg = \"\"\"<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG", "S3AddressingStyle.PATH.value, ), ( \"s3_path_style_dummy_url_with_path_without_slash\", \"dummy\", \"dummy\", \"https://s3.amazonaws.com/dummy/dummy\", None, S3AddressingStyle.PATH.value, ),", "== epub_rep.mirror_url ) assert ( \"final_mirror_url was called with bucket", "\"with_gutenberg_cover_generator_data_source\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/\", ), ( \"with_overdrive_data_source\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/\",", "file was not successfully uploaded, # final_mirror_url was never called", "expected_result @parameterized.expand( [ ( \"without_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\",", "\"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\", ), ( \"with_custom_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\",", "self._uploader(**buckets) # m = uploader.cover_image_url # # unglueit = DataSource.lookup(self._db,", "\"us-east-2\", ), ( \"http_url_template\", \"bucket\", \"the këy\", \"http://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTP, ),", "original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content=\"i'm an epub\", ) epub_rep = epub.resource.representation", "(utc_now() - rep.mirrored_at).seconds < 10 def test_mirror_failure(self): edition, pool =", "A network failure is treated as a transient error. 
    def test_mirror_one(self):
        edition, pool = self._edition(with_license_pool=True)
        original_cover_location = "http://example.com/a-cover.png"
        content = open(self.sample_cover_path("test-book-cover.png"), "rb").read()
        cover, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original_cover_location,
            edition.data_source,
            Representation.PNG_MEDIA_TYPE,
            content=content,
        )
        cover_rep = cover.resource.representation
        assert None == cover_rep.mirrored_at

        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation
        assert None == epub_rep.mirrored_at

        s3 = self._create_s3_uploader(client_class=MockS3Client)

        # Mock final_mirror_url so we can verify that it's called with
        # the right arguments
        def mock_final_mirror_url(bucket, key):
            return "final_mirror_url was called with bucket %s, key %s" % (bucket, key)

        s3.final_mirror_url = mock_final_mirror_url

        book_url = "http://books-go/here.epub"
        cover_url = "http://s3.amazonaws.com/covers-go/here.png"

        s3.mirror_one(cover.resource.representation, cover_url)
        s3.mirror_one(epub.resource.representation, book_url)
        [
            [data1, bucket1, key1, args1, ignore1],
            [data2, bucket2, key2, args2, ignore2],
        ] = s3.client.uploads

        # Both representations have had .mirror_url set and been
        # mirrored to those URLs.
        assert data1.startswith(b"\x89")
        assert "covers-go" == bucket1
        assert "here.png" == key1
        assert Representation.PNG_MEDIA_TYPE == args1["ContentType"]
        assert (utc_now() - cover_rep.mirrored_at).seconds < 10

        assert b"i'm an epub" == data2
        assert "books-go" == bucket2
        assert "here.epub" == key2
        assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"]

        # In both cases, mirror_url was set to the result of final_mirror_url.
        assert (
            "final_mirror_url was called with bucket books-go, key here.epub"
            == epub_rep.mirror_url
        )
        assert (
            "final_mirror_url was called with bucket covers-go, key here.png"
            == cover_rep.mirror_url
        )

        # mirrored-at was set when the representation was 'mirrored'
        for rep in epub_rep, cover_rep:
            assert (utc_now() - rep.mirrored_at).seconds < 10

    def test_svg_mirroring(self):
        edition, pool = self._edition(with_license_pool=True)
        original = self._url

        # Create an SVG cover for the book.
        svg = """<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="50">
    <ellipse cx="50" cy="25" rx="50" ry="25" style="fill:blue;"/>
</svg>"""
        hyperlink, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original,
            edition.data_source,
            Representation.SVG_MEDIA_TYPE,
            content=svg,
        )

        # 'Upload' it to S3.
        s3 = self._create_s3_uploader(MockS3Client)
        s3.mirror_one(hyperlink.resource.representation, self._url)
        [[data, bucket, key, args, ignore]] = s3.client.uploads

        # The SVG is uploaded as-is, not converted to a bitmap.
        assert Representation.SVG_MEDIA_TYPE == args["ContentType"]
        assert b"svg" in data
        assert b"PNG" not in data

    def test_mirror_failure(self):
        edition, pool = self._edition(with_license_pool=True)
        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation

        uploader = self._create_s3_uploader(MockS3Client)

        # A network failure is treated as a transient error.
        uploader.client.fail_with = BotoCoreError()
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # An S3 credential failure is treated as a transient error.
        response = dict(
            Error=dict(
                Code=401,
                Message="Bad credentials",
            )
        )
        uploader.client.fail_with = ClientError(response, "SomeOperation")
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # Because the file was not successfully uploaded,
        # final_mirror_url was never called and mirror_url was not set.
        assert None == epub_rep.mirror_url

        # A bug in the code is not treated as a transient error --
        # the exception propagates through.
        uploader.client.fail_with = Exception("crash!")
        pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url)
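
    # Design note (restating the comments in test_mirror_failure above):
    # network and credential problems (BotoCoreError, ClientError) are
    # swallowed as transient -- the representation is simply left unmirrored
    # so a later run can retry -- while any other exception is assumed to be
    # a bug and is allowed to propagate.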
    def test_multipart_upload(self):
        class MockMultipartS3Upload(MultipartS3Upload):
            completed = None
            aborted = None

            def __init__(self, uploader, representation, mirror_to):
                self.parts = []
                MockMultipartS3Upload.completed = False
                MockMultipartS3Upload.aborted = False

            def upload_part(self, content):
                self.parts.append(content)

            def complete(self):
                MockMultipartS3Upload.completed = True

            def abort(self):
                MockMultipartS3Upload.aborted = True

        rep, ignore = create(
            self._db,
            Representation,
            url=self._url,
            media_type=Representation.MARC_MEDIA_TYPE,
        )

        s3 = self._create_s3_uploader(MockS3Client)

        # Successful upload
        with s3.multipart_upload(rep, rep.url, upload_class=MockMultipartS3Upload) as upload:
            assert [] == upload.parts

            upload.upload_part("Part 1")
            upload.upload_part("Part 2")

            assert ["Part 1", "Part 2"] == upload.parts

        assert True == MockMultipartS3Upload.completed
        assert False == MockMultipartS3Upload.aborted
        assert None == rep.mirror_exception

        class FailingMultipartS3Upload(MockMultipartS3Upload):
            def upload_part(self, content):
                raise Exception("Error!")

        # Failed during upload
        with s3.multipart_upload(rep, rep.url, upload_class=FailingMultipartS3Upload) as upload:
            upload.upload_part("Part 1")

        assert False == MockMultipartS3Upload.completed
        assert True == MockMultipartS3Upload.aborted
        assert "Error!" == rep.mirror_exception

        class AnotherFailingMultipartS3Upload(MockMultipartS3Upload):
            def complete(self):
                raise Exception("Error!")

        rep.mirror_exception = None

        # Failed during completion
        with s3.multipart_upload(
            rep, rep.url, upload_class=AnotherFailingMultipartS3Upload
        ) as upload:
            upload.upload_part("Part 1")

        assert False == MockMultipartS3Upload.completed
        assert True == MockMultipartS3Upload.aborted
        assert "Error!" == rep.mirror_exception

    @parameterized.expand(
        [
            (
                "default_expiration_parameter",
                None,
                int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION),
            ),
            (
                "empty_expiration_parameter",
                {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100},
                100,
            ),
        ]
    )
    def test_sign_url(self, name, expiration_settings, expected_expiration):
        # Arrange
        region = "us-east-1"
        bucket = "bucket"
        filename = "filename"
        url = "https://{0}.s3.{1}.amazonaws.com/{2}".format(bucket, region, filename)
        expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S"
        settings = expiration_settings if expiration_settings else {}
        s3_uploader = self._create_s3_uploader(region=region, **settings)
        s3_uploader.split_url = MagicMock(return_value=(bucket, filename))
        s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url)

        # Act
        result = s3_uploader.sign_url(url)

        # Assert
        assert result == expected_url
        s3_uploader.split_url.assert_called_once_with(url)
        s3_uploader.client.generate_presigned_url.assert_called_once_with(
            "get_object",
            ExpiresIn=expected_expiration,
            Params={"Bucket": bucket, "Key": filename},
        )
# encoding: utf-8
import functools
import os
from urllib.parse import urlsplit

import boto3
import botocore
import pytest
from botocore.exceptions import BotoCoreError, ClientError
from mock import MagicMock
from parameterized import parameterized

from ..mirror import MirrorUploader
from ..model import (
    DataSource,
    ExternalIntegration,
    Hyperlink,
    Identifier,
    Representation,
    create,
)
from ..s3 import (
    MinIOUploader,
    MinIOUploaderConfiguration,
    MockS3Client,
    MultipartS3Upload,
    S3AddressingStyle,
    S3Uploader,
    S3UploaderConfiguration,
)
from ..testing import DatabaseTest
from ..util.datetime_helpers import datetime_utc, utc_now


class S3UploaderTest(DatabaseTest):
    def _integration(self, **settings):
        """Create and configure a simple S3 integration."""
        integration = self._external_integration(
            ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
        )
        integration.username = settings.get("username", "username")
        integration.password = settings.get("password", "password")

        return integration

    def _add_settings_value(self, settings, key, value):
        """Add a value to a settings dictionary.

        :param settings: Settings dictionary
        :type settings: Dict

        :param key: Key
        :type key: string

        :param value: Value
        :type value: Any

        :return: Updated settings dictionary
        :rtype: Dict
        """
        if value:
            if settings:
                settings[key] = value
            else:
                settings = {key: value}

        return settings

    def _create_s3_uploader(
        self,
        client_class=None,
        uploader_class=None,
        region=None,
        addressing_style=None,
        **settings
    ):
        """Create a new instance of S3 uploader.

        :param client_class: (Optional) Custom class to be used instead of
            boto3's client class
        :type client_class: Optional[Type]

        :param uploader_class: (Optional) Custom class to be used instead of
            S3Uploader
        :type uploader_class: Optional[Type]

        :param region: (Optional) S3 region
        :type region: Optional[string]

        :param addressing_style: (Optional) S3 addressing style
        :type addressing_style: Optional[string]

        :param settings: Kwargs used for initializing an external integration
        :type settings: Optional[Dict]

        :return: New instance of S3 uploader
        :rtype: S3Uploader
        """
        settings = self._add_settings_value(
            settings, S3UploaderConfiguration.S3_REGION, region
        )
        settings = self._add_settings_value(
            settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style
        )
        integration = self._integration(**settings)
        uploader_class = uploader_class or S3Uploader

        return uploader_class(integration, client_class=client_class)
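
# A minimal usage sketch (not from the suite itself): how a test in an
# S3UploaderTest subclass might combine the helpers above. The mock client,
# region, and addressing style are illustrative choices; the expected URL
# matches the path-style cases exercised in TestS3Uploader.test_url below.
def _example_path_style_uploader(test_case):
    uploader = test_case._create_s3_uploader(
        client_class=MockS3Client,  # keeps the sketch offline
        region="us-east-2",
        addressing_style=S3AddressingStyle.PATH.value,
    )
    # Path-style addressing puts the bucket in the path, not the hostname.
    assert uploader.url("a-bucket", "a-path") == (
        "https://s3.us-east-2.amazonaws.com/a-bucket/a-path"
    )
    return uploader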

class S3UploaderIntegrationTest(S3UploaderTest):
    SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get(
        "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "http://localhost:9000"
    )
    SIMPLIFIED_TEST_MINIO_USER = os.environ.get(
        "SIMPLIFIED_TEST_MINIO_USER", "minioadmin"
    )
    SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get(
        "SIMPLIFIED_TEST_MINIO_PASSWORD", "<PASSWORD>"
    )
    _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit(
        SIMPLIFIED_TEST_MINIO_ENDPOINT_URL
    )

    minio_s3_client = None
    """boto3 client connected to the locally running MinIO instance"""

    s3_client_class = None
    """Factory function used for creating a boto3 client inside S3Uploader"""

    @classmethod
    def setup_class(cls):
        super(S3UploaderIntegrationTest, cls).setup_class()

        cls.minio_s3_client = boto3.client(
            "s3",
            aws_access_key_id=cls.SIMPLIFIED_TEST_MINIO_USER,
            aws_secret_access_key=cls.SIMPLIFIED_TEST_MINIO_PASSWORD,
            endpoint_url=cls.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
        )
        cls.s3_client_class = functools.partial(
            boto3.client,
            endpoint_url=cls.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
        )

    def teardown_method(self):
        """Deinitialize the test suite by removing all the buckets from MinIO."""
        super(S3UploaderIntegrationTest, self).teardown_method()

        response = self.minio_s3_client.list_buckets()

        for bucket in response["Buckets"]:
            bucket_name = bucket["Name"]
            response = self.minio_s3_client.list_objects(Bucket=bucket_name)

            for object in response.get("Contents", []):
                object_key = object["Key"]
                self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key)

            self.minio_s3_client.delete_bucket(Bucket=bucket_name)

    def _create_s3_uploader(
        self,
        client_class=None,
        uploader_class=None,
        region=None,
        addressing_style=None,
        **settings
    ):
        if settings and "username" not in settings:
            self._add_settings_value(
                settings, "username", self.SIMPLIFIED_TEST_MINIO_USER
            )
        if settings and "password" not in settings:
            self._add_settings_value(
                settings, "password", self.SIMPLIFIED_TEST_MINIO_PASSWORD
            )
        if not client_class:
            client_class = self.s3_client_class

        return super(S3UploaderIntegrationTest, self)._create_s3_uploader(
            client_class, uploader_class, region, addressing_style, **settings
        )
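
# Sketch of why a functools.partial works as the client factory above:
# S3Uploader calls client_class("s3", region_name=..., aws_access_key_id=...,
# aws_secret_access_key=..., ...), and the partial pins endpoint_url so every
# such call targets MinIO instead of AWS. The credentials below are
# illustrative defaults, not values read from the suite's settings.
def _example_minio_client_factory():
    client_factory = functools.partial(
        boto3.client, endpoint_url="http://localhost:9000"
    )
    return client_factory(
        "s3",
        region_name="us-east-1",
        aws_access_key_id="minioadmin",
        aws_secret_access_key="minioadmin",
    )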

class TestS3Uploader(S3UploaderTest):
    def test_names(self):
        # The NAME associated with this class must be the same as its
        # key in the MirrorUploader implementation registry, and it's
        # the same as the name of the integration.
        assert S3Uploader.NAME == ExternalIntegration.S3
        assert S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3]

    def test_instantiation(self):
        integration = self._external_integration(
            ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL
        )
        integration.username = "your-access-key"
        integration.password = "<PASSWORD>"
        integration.setting(
            S3UploaderConfiguration.URL_TEMPLATE_KEY
        ).value = "a transform"
        uploader = MirrorUploader.implementation(integration)

        # The URL_TEMPLATE_KEY setting becomes the .url_transform
        # attribute on the S3Uploader object.
        assert "a transform" == uploader.url_transform

    @parameterized.expand(
        [
            ("empty_credentials", None, None),
            ("empty_string_credentials", "", ""),
            ("non_empty_string_credentials", "username", "password"),
        ]
    )
    def test_initialization(self, name, username, password):
        # Arrange
        settings = {"username": username, "password": password}
        integration = self._external_integration(
            ExternalIntegration.S3,
            goal=ExternalIntegration.STORAGE_GOAL,
            settings=settings,
        )
        client_class = MagicMock()

        # Act
        S3Uploader(integration, client_class=client_class)

        # Assert
        assert client_class.call_count == 2

        # The first client is created without credentials and with an
        # unsigned configuration.
        service_name = client_class.call_args_list[0].args[0]
        region_name = client_class.call_args_list[0].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[0].kwargs[
            "aws_secret_access_key"
        ]
        config = client_class.call_args_list[0].kwargs["config"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == None
        assert aws_secret_access_key == None
        assert config.signature_version == botocore.UNSIGNED
        assert (
            config.s3["addressing_style"]
            == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE
        )

        # The second client is created with the integration's credentials.
        service_name = client_class.call_args_list[1].args[0]
        region_name = client_class.call_args_list[1].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[1].kwargs[
            "aws_secret_access_key"
        ]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert "config" not in client_class.call_args_list[1].kwargs

    def test_custom_client_class(self):
        """You can specify a client class to use instead of boto3.client."""
        integration = self._integration()
        uploader = S3Uploader(integration, MockS3Client)
        assert isinstance(uploader.client, MockS3Client)

    def test_get_bucket(self):
        buckets = {
            S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana",
            S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket",
        }
        buckets_plus_irrelevant_setting = dict(buckets)
        buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value"
        uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting)

        # The S3Uploader knows about the configured buckets.  It
        # wasn't informed of the irrelevant 'not-a-bucket-at-all'
        # setting.
        assert buckets == uploader.buckets

        # get_bucket just does a lookup in .buckets
        uploader.buckets["foo"] = object()
        result = uploader.get_bucket("foo")
        assert uploader.buckets["foo"] == result

    @parameterized.expand(
        [
            (
                "with_default_region",
                "test-open-access-s3-bucket",
                "https://test-open-access-s3-bucket.s3.amazonaws.com/",
            ),
            (
                "with_custom_region",
                "test-open-access-s3-bucket",
                "https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/",
                "us-east-3",
            ),
        ]
    )
    def test_content_root(self, name, bucket, expected_result, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)

        # Act
        result = uploader.content_root(bucket)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "with_gutenberg_cover_generator_data_source",
                "test-book-covers-s3-bucket",
                DataSource.GUTENBERG_COVER_GENERATOR,
                "https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/",
            ),
            (
                "with_overdrive_data_source",
                "test-book-covers-s3-bucket",
                DataSource.OVERDRIVE,
                "https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/",
            ),
            (
                "with_overdrive_data_source_and_scaled_size_and_custom_region",
                "test-book-covers-s3-bucket",
                DataSource.OVERDRIVE,
                "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/",
                300,
                "us-east-3",
            ),
        ]
    )
    def test_cover_image_root(
        self, name, bucket, data_source_name, expected_result, scaled_size=None, region=None
    ):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        data_source = DataSource.lookup(self._db, data_source_name)

        # Act
        result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "s3_url",
                "test-marc-s3-bucket",
                "SHORT",
                "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/",
            ),
            (
                "s3_url_with_custom_region",
                "test-marc-s3-bucket",
                "SHORT",
                "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/",
                "us-east-2",
            ),
            ("custom_http_url", "http://my-feed/", "SHORT", "http://my-feed/SHORT/"),
            ("custom_https_url", "https://my-feed/", "SHORT", "https://my-feed/SHORT/"),
        ]
    )
    def test_marc_file_root(self, name, bucket, library_name, expected_result, region=None):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        library = self._library(short_name=library_name)

        # Act
        result = uploader.marc_file_root(bucket, library)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "with_s3_bucket_and_end_time_and_start_time",
                "marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
            ),
            (
                "with_s3_bucket_and_end_time_and_start_time_and_custom_region",
                "marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
                "us-east-2",
            ),
            (
                "with_http_bucket_and_end_time_and_start_time",
                "http://marc",
                "SHORT",
                "Lane",
                datetime_utc(2020, 1, 2, 0, 0, 0),
                "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc",
                datetime_utc(2020, 1, 1, 0, 0, 0),
            ),
        ]
    )
    def test_marc_file_url(
        self,
        name,
        bucket,
        library_name,
        lane_name,
        end_time,
        expected_result,
        start_time=None,
        region=None,
    ):
        # Arrange
        library = self._library(short_name=library_name)
        lane = self._lane(display_name=lane_name)
        buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket}
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.marc_file_url(library, lane, end_time, start_time)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "with_identifier",
                {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"},
                "ABOOK",
                "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub",
            ),
            (
                "with_custom_extension_and_title_and_data_source",
                {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"},
                "ABOOK",
                "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf",
                ".pdf",
                DataSource.UNGLUE_IT,
                "On Books",
            ),
            (
                "with_protected_access_and_custom_extension_and_title_and_data_source_and_region",
                {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"},
                "ABOOK",
                "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf",
                ".pdf",
                DataSource.UNGLUE_IT,
                "On Books",
                "us-east-3",
                False,
            ),
        ]
    )
    def test_book_url(
        self,
        name,
        buckets,
        identifier,
        expected_result,
        extension=None,
        data_source_name=None,
        title=None,
        region=None,
        open_access=True,
    ):
        # Arrange
        identifier = self._identifier(foreign_id=identifier)
        uploader = self._create_s3_uploader(region=region, **buckets)
        parameters = {"identifier": identifier, "open_access": open_access}
        if extension:
            parameters["extension"] = extension
        if title:
            parameters["title"] = title
        if data_source_name:
            parameters["data_source"] = DataSource.lookup(self._db, data_source_name)

        # Act
        result = uploader.book_url(**parameters)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "without_scaled_size",
                {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"},
                DataSource.UNGLUE_IT,
                "ABOOK",
                "filename",
                "https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename",
            ),
            (
                "with_scaled_size",
                {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"},
                DataSource.UNGLUE_IT,
                "ABOOK",
                "filename",
                "https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename",
                601,
            ),
        ]
    )
    def test_cover_image_url(
        self,
        name,
        buckets,
        data_source_name,
        identifier,
        filename,
        expected_result,
        scaled_size=None,
        region=None,
    ):
        # Arrange
        data_source = DataSource.lookup(self._db, data_source_name)
        identifier = self._identifier(foreign_id=identifier)
        uploader = self._create_s3_uploader(region=region, **buckets)

        # Act
        result = uploader.cover_image_url(
            data_source, identifier, filename, scaled_size=scaled_size
        )

        # Assert
        assert result == expected_result

    def test_key_join(self):
        """Test the code used to build S3 keys from parts."""
        parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"]
        assert (
            "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub"
            == S3Uploader.key_join(parts)
        )

    @parameterized.expand(
        [
            (
                "implicit_s3_url_template",
                "bucket",
                "the key",
                "https://bucket.s3.amazonaws.com/the%20key",
            ),
            (
                "explicit_s3_url_template_with_custom_region",
                "bucket",
                "the key",
                "https://bucket.s3.us-east-2.amazonaws.com/the%20key",
                S3UploaderConfiguration.URL_TEMPLATE_DEFAULT,
                "us-east-2",
            ),
            (
                "https_url_template",
                "bucket",
                "the këy",
                "https://bucket/the%20k%C3%ABy",
                S3UploaderConfiguration.URL_TEMPLATE_HTTPS,
            ),
        ]
    )
    def test_final_mirror_url(
        self, name, bucket, key, expected_result, url_transform=None, region=None
    ):
        # Arrange
        uploader = self._create_s3_uploader(region=region)
        if url_transform:
            uploader.url_transform = url_transform

        # Act
        result = uploader.final_mirror_url(bucket, key)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "s3_url_with_path_without_slash",
                "a-bucket",
                "a-path",
                "https://a-bucket.s3.amazonaws.com/a-path",
            ),
            (
                "s3_path_style_url_with_path_without_slash",
                "a-bucket",
                "a-path",
                "https://s3.amazonaws.com/a-bucket/a-path",
                None,
                S3AddressingStyle.PATH.value,
            ),
            (
                "s3_url_with_custom_region_and_path_with_slash",
                "a-bucket",
                "/a-path",
                "https://a-bucket.s3.us-east-3.amazonaws.com/a-path",
                "us-east-3",
            ),
            (
                "s3_path_style_url_with_custom_region_and_path_without_slash",
                "a-bucket",
                "a-path",
                "https://s3.us-east-2.amazonaws.com/a-bucket/a-path",
                "us-east-2",
                S3AddressingStyle.PATH.value,
            ),
            (
                "custom_http_url_and_path_with_slash",
                "https://a-bucket.com/",
                "/a-path",
                "https://a-bucket.com/a-path",
            ),
        ]
    )
    def test_url(self, name, bucket, path, expected_result, region=None, addressing_style=None):
        # Arrange
        uploader = self._create_s3_uploader(
            region=region, addressing_style=addressing_style
        )

        # Act
        result = uploader.url(bucket, path)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "s3_path_style_request_with_region",
                "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg",
                ("bucket", "directory/filename.jpg"),
            ),
            (
                "s3_virtual_hosted_style_request_with_global_endpoint",
                "https://bucket.s3.amazonaws.com/directory/filename.jpg",
                ("bucket", "directory/filename.jpg"),
            ),
            (
                "http_url",
                "http://book-covers.nypl.org/directory/filename.jpg",
                ("book-covers.nypl.org", "directory/filename.jpg"),
            ),
            (
                "http_url_with_escaped_symbols_but_unquote_set_to_false",
                "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg",
                ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"),
                False,
            ),
        ]
    )
    def test_split_url(self, name, url, expected_result, unquote=True):
        # Arrange
        s3_uploader = self._create_s3_uploader()

        # Act
        result = s3_uploader.split_url(url, unquote)

        # Assert
        assert result == expected_result

    @parameterized.expand(
        [
            (
                "default_expiration_parameter",
                None,
                int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION),
            ),
            (
                "empty_expiration_parameter",
                {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100},
                100,
            ),
        ]
    )
    def test_sign_url(self, name, expiration_settings, expected_expiration):
        # Arrange
        region = "us-east-1"
        bucket = "bucket"
        filename = "filename"
        url = "https://{0}.s3.{1}.amazonaws.com/{2}".format(bucket, region, filename)
        expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S"
        settings = expiration_settings if expiration_settings else {}
        s3_uploader = self._create_s3_uploader(region=region, **settings)
        s3_uploader.split_url = MagicMock(return_value=(bucket, filename))
        s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url)

        # Act
        result = s3_uploader.sign_url(url)

        # Assert
        assert result == expected_url
        s3_uploader.split_url.assert_called_once_with(url)
        s3_uploader.client.generate_presigned_url.assert_called_once_with(
            "get_object",
            ExpiresIn=expected_expiration,
            Params={"Bucket": bucket, "Key": filename},
        )
assert \"a transform\" == uploader.url_transform @parameterized.expand( [ (\"empty_credentials\", None,", "\"s3_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.us-east-3.amazonaws.com/a-path\", \"us-east-3\", ), ( \"s3_path_style_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\",", "rep.url, upload_class=FailingMultipartS3Upload ) as upload: upload.upload_part(\"Part 1\") assert False ==", "settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style ) integration = self._integration(**settings)", "\"default_expiration_parameter\", None, int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), ), ( \"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100, ),", "== expected_result @parameterized.expand( [ ( \"with_default_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ), (", "{S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", ),", "get_bucket just does a lookup in .buckets uploader.buckets[\"foo\"] = object()", "def test_final_mirror_url( self, name, bucket, key, expected_result, url_transform=None, region=None ):", "uploader = self._create_s3_uploader(region=region) library = self._library(short_name=library_name) # Act result =", "= self._create_s3_uploader(MockS3Client) # Successful upload with s3.multipart_upload( rep, rep.url, upload_class=MockMultipartS3Upload", "self).teardown_method() response = self.minio_s3_client.list_buckets() for bucket in response[\"Buckets\"]: bucket_name =", ") def test_sign_url(self, name, expiration_settings, expected_expiration): # Arrange region =", ") from ..testing import DatabaseTest from ..util.datetime_helpers import datetime_utc, utc_now", "None, DataSource.UNGLUE_IT, ), ( \"with_custom_title\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\", None,", "DataSource.UNGLUE_IT) # identifier = self._identifier(foreign_id=\"ABOOK\") # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit, identifier,", "(\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\", \"SHORT\", \"https://my-feed/SHORT/\"), ] )", "(\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), (", "region=None, ): # Arrange library = self._library(short_name=library_name) lane = self._lane(display_name=lane_name)", "representation = Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets = { bucket_type:", "region, filename) expected_url = url + \"?AWSAccessKeyId=KEY&Expires=1&Signature=S\" settings = expiration_settings", "( \"s3_path_style_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.us-east-3.amazonaws.com/a-bucket/a-path\", \"us-east-3\", S3AddressingStyle.PATH.value, ), ( \"custom_http_url_and_path_without_slash\",", ") integration.username = \"your-access-key\" integration.password = \"<PASSWORD>\" integration.setting( 
S3UploaderConfiguration.URL_TEMPLATE_KEY ).value", "Arrange region = \"us-east-1\" bucket = \"bucket\" filename = \"filename\"", "( \"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ), ] )", "filename}, ) class TestMultiPartS3Upload(S3UploaderTest): def _representation(self): rep, ignore = create(", "\"with_s3_bucket_and_end_time_and_start_time\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0),", "data def test_multipart_upload(self): class MockMultipartS3Upload(MultipartS3Upload): completed = None aborted =", "\"ABOOK\", \"filename\", \"https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", ), ( \"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\",", "MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.complete() assert [", "Assert assert result == expected_result @parameterized.expand( [ ( \"s3_url\", \"test-marc-s3-bucket\",", "): \"\"\"Creates a new instance of S3 uploader :param client_class:", "( \"explicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, \"us-east-2\", ), (", "epub_rep.mirrored_at assert None == epub_rep.mirror_exception # An S3 credential failure", "else None) assert aws_secret_access_key == (password if password != \"\"", "treated as a transient error. uploader.client.fail_with = BotoCoreError() uploader.mirror_one(epub_rep, self._url)", "class which will be used insted of S3Uploader :type uploader_class:", "expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with( \"get_object\", ExpiresIn=expected_expiration, Params={\"Bucket\": bucket, \"Key\": filename}, )", "client_class, uploader_class, region, addressing_style, **settings ) class TestS3Uploader(S3UploaderTest): def test_names(self):", "key1, args1, ignore1], [data2, bucket2, key2, args2, ignore2], ] =", "identifier, filename, scaled_size=scaled_size ) # Assert assert result == expected_result", "), ( \"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ), ]", "601, \"us-east-3\", ), ] ) def test_cover_image_url( self, name, buckets,", "( \"custom_http_url_and_path_without_slash\", \"https://a-bucket.com/\", \"a-path\", \"https://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\",", "..mirror import MirrorUploader from ..model import ( DataSource, ExternalIntegration, Hyperlink,", "value): \"\"\"Adds a value to settings dictionary :param settings: Settings", "parameters[\"extension\"] = extension if title: parameters[\"title\"] = title if data_source_name:", "uploader = self._create_s3_uploader(MockS3Client) # A network failure is treated as", "0, 0), \"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), \"us-east-2\",", "== rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): 
raise Exception(\"Error!\") rep.mirror_exception =", "client_class = MagicMock() # Act S3Uploader(integration, client_class=client_class) # Assert assert", "Assert assert result == expected_result @parameterized.expand( [ ( \"without_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY:", "\"without_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", ), ( \"without_scaled_size_and_with_custom_region\",", "\"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ),", "cover_rep.mirrored_at original_epub_location = \"https://books.com/a-book.epub\" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location,", "( \"s3_path_style_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_without_slash\",", ") if not client_class: client_class = self.s3_client_class return super(S3UploaderIntegrationTest, self)._create_s3_uploader(", "buckets, identifier, expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True, ): #", "svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\">", "Optional[string] :param addressing_style: (Optional) S3 addressing style :type addressing_style: Optional[string]", "ExternalIntegration.S3 assert ( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def test_instantiation(self): integration", "[ (\"empty_credentials\", None, None), (\"empty_string_credentials\", \"\", \"\"), (\"non_empty_string_credentials\", \"username\", \"password\"),", "uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act result =", "0, 0), ), ( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1,", "client_class.call_args_list[1].kwargs def test_custom_client_class(self): \"\"\"You can specify a client class to", "), ( \"http_url\", \"http://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"https_url\", \"https://book-covers.nypl.org/directory/filename.jpg\",", "bucket, data_source_name, expected_result, scaled_size=None, region=None, ): # Arrange uploader =", "def test_book_url( self, name, buckets, identifier, expected_result, extension=None, data_source_name=None, title=None,", "= value else: settings = {key: value} return settings def", "in response assert len(response[\"Contents\"]) == 1 [object] = response[\"Contents\"] assert", "None) assert aws_secret_access_key == (password if password != \"\" else", "= s3_uploader.sign_url(url) # Assert assert result == expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with(", "result == expected_result @parameterized.expand( [ ( \"s3_url\", \"test-marc-s3-bucket\", \"SHORT\", \"https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/\",", "[ ( \"with_s3_bucket_and_end_time\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 1, 
0,", "MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted = False def upload_part(self, content): self.parts.append(content)", "Act book_url = s3_uploader.book_url(identifier, open_access=open_access) s3_uploader.mirror_one(representation, book_url) # Assert response", "0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), ), (", "s3 = self._create_s3_uploader(client_class=MockS3Client) # Mock final_mirror_url so we can verify", "region=None, open_access=True, ): # Arrange identifier = self._identifier(foreign_id=identifier) uploader =", "1 == upload.part_number assert [] == upload.parts assert 1 ==", "S3AddressingStyle, S3Uploader, S3UploaderConfiguration, ) from ..testing import DatabaseTest from ..util.datetime_helpers", "name, username, password): # Arrange settings = {\"username\": username, \"password\":", "True == MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception @parameterized.expand( [ (", "2, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", }, ] == uploader.client.parts assert", "dictionary :type settings: Dict :param key: Key :type key: string", "region_name = client_class.call_args_list[1].kwargs[\"region_name\"] aws_access_key_id = client_class.call_args_list[1].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[1].kwargs[ \"aws_secret_access_key\"", "< 10 assert b\"i'm an epub\" == data2 assert \"books-go\"", "S3 integration.\"\"\" integration = self._external_integration( ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings ) integration.username", "ignore = pool.add_link( Hyperlink.IMAGE, original, edition.data_source, Representation.SVG_MEDIA_TYPE, content=svg, ) #", "key) s3.final_mirror_url = mock_final_mirror_url book_url = \"http://books-go/here.epub\" cover_url = \"http://s3.amazonaws.com/covers-go/here.png\"", "\"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse cx=\"50\"", "upload.filename assert 1 == upload.part_number assert [] == upload.parts assert", "url=\"http://books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, ) s3 = self._create_s3_uploader(MockS3Client) # Successful upload with", "addressing_style=addressing_style ) # Act result = uploader.url(bucket, path) # Assert", "{ \"Parts\": [ {\"ETag\": \"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\":", "uploader.book_url(**parameters) # Assert assert result == expected_result @parameterized.expand( [ (", "called with bucket %s, key %s\" % (bucket, key) s3.final_mirror_url", "bucket2, key2, args2, ignore2], ] = s3.client.uploads # Both representations", "), ( \"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100, ), ] ) def", "ExternalIntegration.STORAGE_GOAL, settings=settings ) integration.username = settings.get(\"username\", \"username\") integration.password = settings.get(\"password\",", "MockS3Client, MultipartS3Upload, S3AddressingStyle, S3Uploader, S3UploaderConfiguration, ) from ..testing import DatabaseTest", "== uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand( [ ( \"using_s3_uploader_and_open_access_bucket\", functools.partial(", "uploader.url(bucket, path) # Assert assert result == expected_result @parameterized.expand( [", "\"SIMPLIFIED_TEST_MINIO_USER\", \"minioadmin\" ) 
SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get( \"SIMPLIFIED_TEST_MINIO_PASSWORD\", \"<PASSWORD>\" ) _,", "_integration(self, **settings): \"\"\"Create and configure a simple S3 integration.\"\"\" integration", "( \"with_overdrive_data_source\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/\", ), ( \"with_overdrive_data_source_and_scaled_size\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE,", "assert None == epub_rep.mirrored_at assert None == epub_rep.mirror_exception # An", "Act S3Uploader(integration, client_class=client_class) # Assert assert client_class.call_count == 2 service_name", "= self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part", "rep def test_init(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload", "transient error. uploader.client.fail_with = BotoCoreError() uploader.mirror_one(epub_rep, self._url) assert None ==", ") integration = self._integration(**settings) uploader_class = uploader_class or S3Uploader return", "None aborted = None def __init__(self, uploader, representation, mirror_to): self.parts", "0, 0), ), ] ) def test_marc_file_url( self, name, bucket,", "self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def _create_s3_uploader( self, client_class=None, uploader_class=None, region=None, addressing_style=None,", "# get_bucket just does a lookup in .buckets uploader.buckets[\"foo\"] =", "== epub_rep.mirrored_at s3 = self._create_s3_uploader(client_class=MockS3Client) # Mock final_mirror_url so we", "= {\"identifier\": identifier, \"open_access\": open_access} if extension: parameters[\"extension\"] = extension", "MockS3Client) def test_get_bucket(self): buckets = { S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"banana\", S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"bucket\",", "\"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1,", "datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1,", "self._url # Create an SVG cover for the book. svg", "assert Representation.SVG_MEDIA_TYPE == args[\"ContentType\"] assert b\"svg\" in data assert b\"PNG\"", "MagicMock(return_value=expected_url) # Act result = s3_uploader.sign_url(url) # Assert assert result", "self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part", ") # 'Upload' it to S3. 
s3 = self._create_s3_uploader(MockS3Client) s3.mirror_one(hyperlink.resource.representation,", "\"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \"pdf\", ), ( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\",", "== ExternalIntegration.S3 assert ( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def test_instantiation(self):", "\"s3_dummy_url_with_path_without_slash\", \"dummy\", \"dummy\", \"https://dummy.s3.amazonaws.com/dummy\", None, ), ( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\",", "from urllib.parse import urlsplit import boto3 import botocore import pytest", "\"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), ( \"explicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT,", "self._create_s3_uploader(MockS3Client) # Successful upload with s3.multipart_upload( rep, rep.url, upload_class=MockMultipartS3Upload )", "media_type=Representation.MARC_MEDIA_TYPE, ) return rep def test_init(self): uploader = self._create_s3_uploader(MockS3Client) rep", "( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\", \"us-east-2\", ), ( \"s3_path_style_url_with_custom_region_and_path_without_slash\", \"a-bucket\",", "be used instead of boto3's client class :type client_class: Optional[Type]", "\"Part 2\", \"UploadId\": 1, \"PartNumber\": 2, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\",", "config.signature_version == botocore.UNSIGNED assert ( config.s3[\"addressing_style\"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name", "in response[\"Buckets\"]: bucket_name = bucket[\"Name\"] response = self.minio_s3_client.list_objects(Bucket=bucket_name) for object", "# buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} # uploader = self._uploader(**buckets)", "None == epub_rep.mirror_exception # Because the file was not successfully", "@parameterized.expand( [ ( \"implicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", ), (", "100, ), ] ) def test_sign_url(self, name, expiration_settings, expected_expiration): #", "addressing_style: Optional[string] :param settings: Kwargs used for initializing an external", "\"banana\", S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"bucket\", } buckets_plus_irrelevant_setting = dict(buckets) buckets_plus_irrelevant_setting[\"not-a-bucket-at-all\"] = \"value\"", "( \"with_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, ),", "use instead of boto3.client.\"\"\" integration = self._integration() uploader = S3Uploader(integration,", "1, 2, 0, 0, 0), \"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0,", "assert b\"PNG\" not in data def test_multipart_upload(self): class MockMultipartS3Upload(MultipartS3Upload): completed", "\"https://bucket.s3.amazonaws.com/the%20key\", ), ( \"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None, \"us-east-2\",", "for 
creating a boto3 client inside S3Uploader\"\"\" @classmethod def setup_class(cls):", "Act result = s3_uploader.sign_url(url) # Assert assert result == expected_url", "\"us-east-2\", ), ( \"s3_path_style_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.us-east-2.amazonaws.com/a-bucket/a-path\", \"us-east-2\", S3AddressingStyle.PATH.value, ),", "book_url) [ [data1, bucket1, key1, args1, ignore1], [data2, bucket2, key2,", "== S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == (username if username != \"\"", "result == expected_result @parameterized.expand( [ ( \"with_s3_bucket_and_end_time\", \"marc\", \"SHORT\", \"Lane\",", "( \"s3_url_with_custom_region\", \"test-marc-s3-bucket\", \"SHORT\", \"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\",", "start_time=None, region=None, ): # Arrange library = self._library(short_name=library_name) lane =", "assert result == expected_result @parameterized.expand( [ ( \"with_s3_bucket_and_end_time\", \"marc\", \"SHORT\",", "assert result == expected_result def test_mirror_one(self): edition, pool = self._edition(with_license_pool=True)", "class S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get( \"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER =", "= Exception(\"Error!\") pytest.raises(Exception, upload.upload_part, \"Part 3\") def test_complete(self): uploader =", "associated with this class must be the same as its", "(\"empty_credentials\", None, None), (\"empty_string_credentials\", \"\", \"\"), (\"non_empty_string_credentials\", \"username\", \"password\"), ]", "( \"Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub\" == S3Uploader.key_join(parts) ) @parameterized.expand( [ ( \"with_gutenberg_cover_generator_data_source\", \"test-book-covers-s3-bucket\",", "result == expected_result @parameterized.expand( [ ( \"with_identifier\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\",", "new instance of S3 uploader :param client_class: (Optional) Custom class", "botocore.UNSIGNED assert ( config.s3[\"addressing_style\"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name = client_class.call_args_list[1].args[0]", "1, 1, 0, 0, 0), ), ( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\", \"SHORT\",", "(\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), (", "upload = MultipartS3Upload(uploader, rep, rep.url) assert uploader == upload.uploader assert", "not in settings: self._add_settings_value( settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not", "( \"s3_path_style_request_without_region\", \"https://s3.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\",", "Failed during upload with s3.multipart_upload( rep, rep.url, upload_class=FailingMultipartS3Upload ) as", ":rtype: S3Uploader \"\"\" settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region )", "the representation was 'mirrored' for rep in epub_rep, cover_rep: assert", "test_split_url(self, name, url, expected_result, unquote=True): # Arrange 
s3_uploader = self._create_s3_uploader()", "upload.abort() assert [] == uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand( [", "exception propagates through. uploader.client.fail_with = Exception(\"crash!\") pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url)", "client_class=client_class) class S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get( \"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER", "bucket1 assert \"here.png\" == key1 assert Representation.PNG_MEDIA_TYPE == args1[\"ContentType\"] assert", "= self._create_s3_uploader(MockS3Client) # A network failure is treated as a", ") settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style ) integration =", "rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.complete() assert [ {", "( \"s3_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None, ), ( \"s3_dummy_url_with_path_without_slash\", \"dummy\",", "%s, key %s\" % (bucket, key) s3.final_mirror_url = mock_final_mirror_url book_url", "assert aws_access_key_id == None assert aws_secret_access_key == None assert config.signature_version", "# Arrange uploader = self._create_s3_uploader(region=region) library = self._library(short_name=library_name) # Act", "bucket books-go, key here.epub\" == epub_rep.mirror_url ) assert ( \"final_mirror_url", "(\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dotted_region\", \"https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), (", "a transient error -- # the exception propagates through. 
uploader.client.fail_with", "= self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act result = uploader.url(bucket,", "), ( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\", \"us-east-2\", ), ( \"s3_path_style_url_with_custom_region_and_path_without_slash\",", "expected_result def test_mirror_one(self): edition, pool = self._edition(with_license_pool=True) original_cover_location = \"http://example.com/a-cover.png\"", "\"rb\").read() cover, ignore = pool.add_link( Hyperlink.IMAGE, original_cover_location, edition.data_source, Representation.PNG_MEDIA_TYPE, content=content,", "), ( \"with_gutenberg_cover_generator_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/\", None, \"us-east-3\", ), (", "self._create_s3_uploader(region=region, **buckets) parameters = {\"identifier\": identifier, \"open_access\": open_access} if extension:", "rep, rep.url, upload_class=MockMultipartS3Upload ) as upload: assert [] == upload.parts", "# Mock final_mirror_url so we can verify that it's called", "scaled_size=None, region=None, ): # Arrange uploader = self._create_s3_uploader(region=region) data_source =", "set when the representation was 'mirrored' for rep in epub_rep,", "Books\", \"us-east-3\", ), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\",", "settings: settings.update(buckets) else: settings = buckets s3_uploader = self._create_s3_uploader( uploader_class=uploader_class,", "= S3Uploader(integration, MockS3Client) assert isinstance(uploader.client, MockS3Client) def test_get_bucket(self): buckets =", "\"https_url\", \"https://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"http_url_with_escaped_symbols\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename", "key2 assert Representation.EPUB_MEDIA_TYPE == args2[\"ContentType\"] # In both cases, mirror_url", "\"s3_path_style_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\",", "== expected_result def test_key_join(self): \"\"\"Test the code used to build", "result = uploader.marc_file_root(bucket, library) # Assert assert result == expected_result", "% (bucket, key) s3.final_mirror_url = mock_final_mirror_url book_url = \"http://books-go/here.epub\" cover_url", "def test_split_url(self, name, url, expected_result, unquote=True): # Arrange s3_uploader =", "minio_s3_client = None \"\"\"boto3 client connected to locally running MinIO", "\"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/\", ), ( \"with_overdrive_data_source_and_scaled_size\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/\", 300,", "assert \"books-go\" == bucket2 assert \"here.epub\" == key2 assert Representation.EPUB_MEDIA_TYPE", "unquote=True): # Arrange s3_uploader = 
self._create_s3_uploader() # Act result =", "\"s3_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None, ), ( \"s3_path_style_url_with_path_with_slash\", \"a-bucket\", \"/a-path\",", "= MagicMock(return_value=(bucket, filename)) s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url) # Act result =", "**settings): \"\"\"Create and configure a simple S3 integration.\"\"\" integration =", "= Identifier(type=Identifier.ISBN, identifier=book_title) representation = Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets", "rep.url, upload_class=MockMultipartS3Upload ) as upload: assert [] == upload.parts assert", "expected_result, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) library =", "self.s3_client_class return super(S3UploaderIntegrationTest, self)._create_s3_uploader( client_class, uploader_class, region, addressing_style, **settings )", "configure a simple S3 integration.\"\"\" integration = self._external_integration( ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL,", "SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get( \"SIMPLIFIED_TEST_MINIO_PASSWORD\", \"<PASSWORD>\" ) _, SIMPLIFIED_TEST_MINIO_HOST, _, _,", "settings = expiration_settings if expiration_settings else {} s3_uploader = self._create_s3_uploader(region=region,", "== (username if username != \"\" else None) assert aws_secret_access_key", "rep.url) assert uploader == upload.uploader assert rep == upload.representation assert", "upload with s3.multipart_upload( rep, rep.url, upload_class=FailingMultipartS3Upload ) as upload: upload.upload_part(\"Part", "\"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", None, \"us-east-3\", ),", "\"books.mrc\", }, ] == uploader.client.parts assert 3 == upload.part_number assert", "), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\", \"SHORT\", \"https://my-feed/SHORT/\"), ]", "becomes the .url_transform # attribute on the S3Uploader object. 
assert", "open_access} if extension: parameters[\"extension\"] = extension if title: parameters[\"title\"] =", "( \"s3_path_style_dummy_url_with_path_without_slash\", \"dummy\", \"dummy\", \"https://s3.amazonaws.com/dummy/dummy\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_path_with_slash\",", "= {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} # uploader = self._uploader(**buckets) # m", "assert b\"svg\" in data assert b\"PNG\" not in data def", "integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL ) integration.username = \"your-access-key\" integration.password", "\"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\", None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300,", "client_class.call_args_list[0].kwargs[\"region_name\"] aws_access_key_id = client_class.call_args_list[0].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[0].kwargs[ \"aws_secret_access_key\" ] config", "epub_rep.mirrored_at assert None == epub_rep.mirror_exception # Because the file was", "bucket[\"Name\"] response = self.minio_s3_client.list_objects(Bucket=bucket_name) for object in response.get(\"Contents\", []): object_key", "test_custom_client_class(self): \"\"\"You can specify a client class to use instead", "import boto3 import botocore import pytest from botocore.exceptions import BotoCoreError,", "), ( \"with_custom_title\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\", None, None, \"On", "= client_class.call_args_list[1].kwargs[ \"aws_secret_access_key\" ] assert service_name == \"s3\" assert region_name", "None \"\"\"boto3 client connected to locally running MinIO instance\"\"\" s3_client_class", "the test suite by removing all the buckets from MinIO\"\"\"", "self._create_s3_uploader() # Act result = s3_uploader.split_url(url, unquote) # Assert assert", "= DataSource.lookup(self._db, data_source_name) # Act result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size)", "as a transient error -- # the exception propagates through.", "with s3.multipart_upload( rep, rep.url, upload_class=AnotherFailingMultipartS3Upload ) as upload: upload.upload_part(\"Part 1\")", "transient error. 
class S3UploaderIntegrationTest(S3UploaderTest):
    SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get(
        "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "http://localhost:9000"
    )
    SIMPLIFIED_TEST_MINIO_USER = os.environ.get("SIMPLIFIED_TEST_MINIO_USER", "minioadmin")
    SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get(
        "SIMPLIFIED_TEST_MINIO_PASSWORD", "<PASSWORD>"
    )
    _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit(SIMPLIFIED_TEST_MINIO_ENDPOINT_URL)

    minio_s3_client = None
    """boto3 client connected to locally running MinIO instance"""

    s3_client_class = None
    """Factory function used for creating a boto3 client inside the uploader"""

    @classmethod
    def setup_class(cls):
        """Initializes the test suite by creating a boto3 client set up with MinIO credentials"""
        super(S3UploaderIntegrationTest, cls).setup_class()

        cls.minio_s3_client = boto3.client(
            "s3",
            aws_access_key_id=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER,
            aws_secret_access_key=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD,
            endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
        )
        cls.s3_client_class = functools.partial(
            boto3.client,
            endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,
        )
\"s3_virtual_hosted_style_request_with_dashed_region\", \"https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dotted_region\",", "was not successfully uploaded, # final_mirror_url was never called and", "about the configured buckets. It # wasn't informed of the", "\"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, \"us-east-2\", ), ( \"http_url_template\", \"bucket\", \"the këy\", \"http://bucket/the%20k%C3%ABy\",", "= pool.add_link( Hyperlink.IMAGE, original, edition.data_source, Representation.SVG_MEDIA_TYPE, content=svg, ) # 'Upload'", "pytest from botocore.exceptions import BotoCoreError, ClientError from mock import MagicMock", "instance\"\"\" s3_client_class = None \"\"\"Factory function used for creating a", "of boto3.client.\"\"\" integration = self._integration() uploader = S3Uploader(integration, MockS3Client) assert", "( \"http_url\", \"http://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"https_url\", \"https://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\",", "# Assert assert result == expected_result @parameterized.expand( [ ( \"without_scaled_size\",", "Dict :param key: Key :type key: string :param value: Value", "import pytest from botocore.exceptions import BotoCoreError, ClientError from mock import", "\"/a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None, ), ( \"s3_path_style_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None,", "\"implicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", ), ( \"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the", "s3_uploader.sign_url(url) # Assert assert result == expected_url s3_uploader.split_url.assert_called_once_with(url) s3_uploader.client.generate_presigned_url.assert_called_once_with( \"get_object\",", "\"username\", self.SIMPLIFIED_TEST_MINIO_USER ) if settings and \"password\" not in settings:", "\"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub\", ), ( \"with_custom_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \"pdf\",", "Representation.EPUB_MEDIA_TYPE, content=\"i'm an epub\", ) epub_rep = epub.resource.representation uploader =", "None assert config.signature_version == botocore.UNSIGNED assert ( config.s3[\"addressing_style\"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE", "), ( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0,", "{S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", False,", "self._create_s3_uploader(region=region, **buckets) # Act result = uploader.marc_file_url(library, lane, end_time, start_time)", "of S3 uploader :rtype: S3Uploader \"\"\" settings = self._add_settings_value( settings,", "\"a-bucket\", \"/a-path\", \"https://s3.us-east-3.amazonaws.com/a-bucket/a-path\", \"us-east-3\", S3AddressingStyle.PATH.value, ), ( \"custom_http_url_and_path_without_slash\", \"http://a-bucket.com/\", \"a-path\",", "client connected to locally running MinIO instance\"\"\" 
s3_client_class = None", "None, ), ( \"custom_http_url_and_path_with_slash\", \"http://a-bucket.com/\", \"/a-path\", \"http://a-bucket.com/a-path\", None, ), (", "\"with_custom_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/\", \"us-east-3\", ), ] ) def test_content_root(self, name,", "create, ) from ..s3 import ( MinIOUploader, MinIOUploaderConfiguration, MockS3Client, MultipartS3Upload,", ":type client_class: Optional[Type] :param: uploader_class: (Optional) Custom class which will", "(\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"https_url\", \"https://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), (", "= self._create_s3_uploader(region=region, **buckets) # Act result = uploader.cover_image_url( data_source, identifier,", "\"us-east-2\", ), (\"custom_http_url\", \"http://my-feed/\", \"SHORT\", \"http://my-feed/SHORT/\"), (\"custom_https_url\", \"https://my-feed/\", \"SHORT\", \"https://my-feed/SHORT/\"),", "( \"explicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ), ( \"explicit_s3_url_template_with_custom_region\",", "Exception(\"Error!\") rep.mirror_exception = None # Failed during completion with s3.multipart_upload(", "= self._edition(with_license_pool=True) original_cover_location = \"http://example.com/a-cover.png\" content = open(self.sample_cover_path(\"test-book-cover.png\"), \"rb\").read() cover,", "False == upload.aborted upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") assert [\"Part 1\",", "rep.url) def test_upload_part(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload", "a client class to use instead of boto3.client.\"\"\" integration =", "object in response.get(\"Contents\", []): object_key = object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name)", "integration.\"\"\" integration = self._external_integration( ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings ) integration.username =", "uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, upload.upload_part, \"Part 3\") def test_complete(self): uploader", "== upload.upload.get(\"UploadId\") uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url)", "content=content, ) cover_rep = cover.resource.representation assert None == cover_rep.mirrored_at original_epub_location", "False, ), ] ) def test_book_url( self, name, buckets, identifier,", "1, 0, 0, 0), \"us-east-2\", ), ( \"with_http_bucket_and_end_time_and_start_time\", \"http://marc\", \"SHORT\",", "assert \"Contents\" in response assert len(response[\"Contents\"]) == 1 [object] =", "), ( \"with_custom_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub\", None, DataSource.UNGLUE_IT, ),", "S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, ), ( \"using_s3_uploader_and_protected_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ),", "\"ABOOK\", \"filename\", \"https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, ), ( \"with_scaled_size_and_custom_region\", 
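    # Illustrative sketch (an assumption, not part of the original
    # suite): pointing a plain boto3 client at the same local MinIO
    # instance these integration tests expect, e.g. one started with
    # `docker run -p 9000:9000 minio/minio server /data`.
    @staticmethod
    def _example_minio_client(endpoint_url="http://localhost:9000"):  # pragma: no cover
        return boto3.client(
            "s3",
            aws_access_key_id="minioadmin",  # default MinIO credentials; override in real use
            aws_secret_access_key="minioadmin",
            endpoint_url=endpoint_url,
        )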
class TestS3Uploader(S3UploaderTest):
    def test_names(self):
        # The NAME associated with this class must be the same as its
        # key in the MirrorUploader implementation registry, and it's
        # better if it's the same as the name of the external
        # integration.
        assert S3Uploader.NAME == ExternalIntegration.S3
        assert S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3]

    def test_instantiation(self):
        integration = self._external_integration(
            ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL
        )
        integration.username = "your-access-key"
        integration.password = "<PASSWORD>"
        integration.setting(S3UploaderConfiguration.URL_TEMPLATE_KEY).value = "a transform"
        uploader = MirrorUploader.implementation(integration)
        assert True == isinstance(uploader, S3Uploader)

        # The URL_TEMPLATE_KEY setting becomes the .url_transform
        # attribute on the S3Uploader object.
        assert "a transform" == uploader.url_transform

    @parameterized.expand(
        [
            ("empty_credentials", None, None),
            ("empty_string_credentials", "", ""),
            ("non_empty_string_credentials", "username", "password"),
        ]
    )
    def test_initialization(self, name, username, password):
        # Arrange
        settings = {"username": username, "password": password}
        integration = self._external_integration(
            ExternalIntegration.S3,
            goal=ExternalIntegration.STORAGE_GOAL,
            settings=settings,
        )
        client_class = MagicMock()

        # Act
        S3Uploader(integration, client_class=client_class)

        # Assert
        # The first boto3 client is an unsigned one, used only to
        # generate URLs.
        service_name = client_class.call_args_list[0].args[0]
        region_name = client_class.call_args_list[0].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[0].kwargs["aws_secret_access_key"]
        config = client_class.call_args_list[0].kwargs["config"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == None
        assert aws_secret_access_key == None
        assert config.signature_version == botocore.UNSIGNED
        assert config.s3["addressing_style"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE

        # The second client does the uploading; empty-string
        # credentials are treated as missing credentials.
        service_name = client_class.call_args_list[1].args[0]
        region_name = client_class.call_args_list[1].kwargs["region_name"]
        aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"]
        aws_secret_access_key = client_class.call_args_list[1].kwargs["aws_secret_access_key"]
        assert service_name == "s3"
        assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION
        assert aws_access_key_id == (username if username != "" else None)
        assert aws_secret_access_key == (password if password != "" else None)
        assert "config" not in client_class.call_args_list[1].kwargs

    def test_custom_client_class(self):
        """You can specify a client class to use instead of boto3.client."""
        integration = self._integration()
        uploader = S3Uploader(integration, MockS3Client)
        assert isinstance(uploader.client, MockS3Client)

    def test_get_bucket(self):
        buckets = {
            S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana",
            S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket",
        }
        buckets_plus_irrelevant_setting = dict(buckets)
        buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value"
        uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting)

        # This S3Uploader knows about the configured buckets. It
        # wasn't informed of the irrelevant 'not-a-bucket-at-all'
        # setting.
        assert buckets == uploader.buckets

        # get_bucket just does a lookup in .buckets
        uploader.buckets["foo"] = object()
        result = uploader.get_bucket("foo")
        assert uploader.buckets["foo"] == result
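    # Illustrative sketch (an assumption, not part of the original
    # suite): the botocore Config shape that test_initialization
    # inspects above; "path" forces https://s3.../bucket/key URLs,
    # "virtual" forces https://bucket.s3.../key URLs.
    @staticmethod
    def _example_addressing_style_config(style="path"):  # pragma: no cover
        from botocore.client import Config

        return Config(signature_version=botocore.UNSIGNED, s3={"addressing_style": style})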
upload.upload_part(\"Part 2\") upload.abort() assert [] ==", "(Optional) Custom class to be used instead of boto3's client", "MultipartS3Upload, S3AddressingStyle, S3Uploader, S3UploaderConfiguration, ) from ..testing import DatabaseTest from", "\"https://books.com/a-book.epub\" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content=\"i'm", "error. uploader.client.fail_with = BotoCoreError() uploader.mirror_one(epub_rep, self._url) assert None == epub_rep.mirrored_at", "edition.data_source, Representation.EPUB_MEDIA_TYPE, content=\"i'm an epub\", ) epub_rep = epub.resource.representation assert", "and \"username\" not in settings: self._add_settings_value( settings, \"username\", self.SIMPLIFIED_TEST_MINIO_USER )", "m(unglueit, identifier, \"filename\", scaled_size=601)) # Arrange data_source = DataSource.lookup(self._db, data_source_name)", "Assert assert result == expected_result @parameterized.expand( [ ( \"with_s3_bucket_and_end_time\", \"marc\",", "upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.abort() assert [] == uploader.client.parts @pytest.mark.minio", "response[\"Buckets\"]: bucket_name = bucket[\"Name\"] response = self.minio_s3_client.list_objects(Bucket=bucket_name) for object in", "\"https://dummy.s3.amazonaws.com/dummy\", None, ), ( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value,", "\"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, \"us-east-3\", ), ] )", "expected_url = url + \"?AWSAccessKeyId=KEY&Expires=1&Signature=S\" settings = expiration_settings if expiration_settings", "else: settings = {key: value} return settings def _create_s3_uploader( self,", "self._create_s3_uploader(MockS3Client) rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) assert", "self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act result = uploader.url(bucket, path)", "1, 1, 0, 0, 0), ), ] ) def test_marc_file_url(", "( \"with_custom_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub\", None, DataSource.UNGLUE_IT, ), (", "\"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class: client_class = self.s3_client_class return", "expected_result @parameterized.expand( [ ( \"s3_path_style_request_without_region\", \"https://s3.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), (", "svg = \"\"\"<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg", "def test_complete(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation() upload =", "\"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_path_style_dummy_url_with_path_without_slash\", \"dummy\", \"dummy\", \"https://s3.amazonaws.com/dummy/dummy\", None,", "S3Uploader \"\"\" settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region ) settings", "Arrange identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) parameters =", "been # mirrored to those URLs. 
assert data1.startswith(b\"\\x89\") assert \"covers-go\"", "uploader == upload.uploader assert rep == upload.representation assert \"bucket\" ==", "data_source = DataSource.lookup(self._db, DataSource.UNGLUE_IT) parameters[\"data_source\"] = data_source # Act result", "assert True == isinstance(uploader, S3Uploader) # The URL_TEMPLATE_KEY setting becomes", "= dict(buckets) buckets_plus_irrelevant_setting[\"not-a-bucket-at-all\"] = \"value\" uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting) # This", "Arrange data_source = DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier) uploader =", "the file was not successfully uploaded, # final_mirror_url was never", "Optional[Type] :param: uploader_class: (Optional) Custom class which will be used", "assert ( \"final_mirror_url was called with bucket covers-go, key here.png\"", ") buckets = { bucket_type: bucket_name, } if settings: settings.update(buckets)", "url_transform=None, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) if url_transform:", "== result @parameterized.expand( [ ( \"s3_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None,", "\"a-path\", \"https://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\", \"/a-path\", \"https://a-bucket.com/a-path\", None,", "[ ( \"with_gutenberg_cover_generator_data_source\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/\", ), ( \"with_overdrive_data_source\", \"test-book-covers-s3-bucket\",", "( \"s3_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.amazonaws.com/a-path\", None, ), ( \"s3_path_style_url_with_path_with_slash\", \"a-bucket\",", "b\"i'm an epub\" == data2 assert \"books-go\" == bucket2 assert", "settings and \"password\" not in settings: self._add_settings_value( settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD", "result == expected_result def test_mirror_one(self): edition, pool = self._edition(with_license_pool=True) original_cover_location", "assert result == expected_result @parameterized.expand( [ ( \"implicit_s3_url_template\", \"bucket\", \"the", "Books\", ), ( \"with_custom_extension_and_title_and_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT,", "\"books.mrc\", }, { \"Body\": \"Part 2\", \"UploadId\": 1, \"PartNumber\": 2,", "key, expected_result, url_transform=None, region=None ): # Arrange uploader = self._create_s3_uploader(region=region)", "upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") assert [ { \"Body\": \"Part 1\",", "rep, rep.url, upload_class=FailingMultipartS3Upload ) as upload: upload.upload_part(\"Part 1\") assert False", "New intance of S3 uploader :rtype: S3Uploader \"\"\" if settings", "S3Uploader.NAME == ExternalIntegration.S3 assert ( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def", "S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style ) integration = self._integration(**settings) uploader_class = uploader_class or", "test_marc_file_root( self, name, bucket, library_name, expected_result, region=None ): # Arrange", ":type key: string :param value: Value :type value: Any :return:", 
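    # Illustrative sketch (an assumption, not part of the original
    # suite): the %-escaping that key_join's expected value above
    # relies on comes from standard URL quoting of each part.
    @staticmethod
    def _example_quote_key_part(part="Die Flügelmaus+.epub"):  # pragma: no cover
        from urllib.parse import quote

        # Spaces become %20, '+' becomes %2B, non-ASCII is UTF-8
        # encoded: 'Die%20Fl%C3%BCgelmaus%2B.epub'
        return quote(part)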
"S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform ) assert result == expected_result def test_key_join(self):", "region = \"us-east-1\" bucket = \"bucket\" filename = \"filename\" url", "( S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def test_instantiation(self): integration = self._external_integration(", "@parameterized.expand( [ ( \"s3_path_style_request_without_region\", \"https://s3.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_path_style_request_with_region\",", "client_class.call_args_list[1].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[1].kwargs[ \"aws_secret_access_key\" ] assert service_name == \"s3\"", "service_name = client_class.call_args_list[0].args[0] region_name = client_class.call_args_list[0].kwargs[\"region_name\"] aws_access_key_id = client_class.call_args_list[0].kwargs[\"aws_access_key_id\"] aws_secret_access_key", "for object in response.get(\"Contents\", []): object_key = object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key)", "# A bug in the code is not treated as", "\"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None, \"us-east-2\", ), ( \"explicit_s3_url_template\",", "self._create_s3_uploader(region=region, **buckets) # Act result = uploader.cover_image_url( data_source, identifier, filename,", "cover_rep.mirror_url ) # mirrored-at was set when the representation was", "False == upload.completed assert False == upload.aborted upload.upload_part(\"Part 1\") upload.upload_part(\"Part", "] == upload.parts uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, upload.upload_part, \"Part 3\")", "Act result = uploader.url(bucket, path) # Assert assert result ==", "{ \"Body\": \"Part 2\", \"UploadId\": 1, \"PartNumber\": 2, \"Bucket\": \"bucket\",", "[[data, bucket, key, args, ignore]] = s3.client.uploads assert Representation.SVG_MEDIA_TYPE ==", "name, bucket, key, expected_result, url_transform=None, region=None ): # Arrange uploader", "self._create_s3_uploader( uploader_class=uploader_class, **settings ) self.minio_s3_client.create_bucket(Bucket=bucket_name) # Act book_url = s3_uploader.book_url(identifier,", "(Optional) S3 addressing style :type addressing_style: Optional[string] :param settings: Kwargs", "credentials\"\"\" super(S3UploaderIntegrationTest, cls).setup_class() cls.minio_s3_client = boto3.client( \"s3\", aws_access_key_id=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER, aws_secret_access_key=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD, endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL,", "{ \"Body\": \"Part 1\", \"UploadId\": 1, \"PartNumber\": 1, \"Bucket\": \"bucket\",", "= \"<PASSWORD>\" integration.setting( S3UploaderConfiguration.URL_TEMPLATE_KEY ).value = \"a transform\" uploader =", "\"https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/\", ), ( \"s3_url_with_custom_region\", \"test-marc-s3-bucket\", \"SHORT\", \"https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/\", \"us-east-2\", ), (\"custom_http_url\",", "\"\"\" settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region ) settings =", "Identifier, Representation, create, ) from ..s3 import ( MinIOUploader, MinIOUploaderConfiguration,", "filename = \"filename\" url = \"https://{0}.s3.{1}.amazonaws.com/{2}\".format(bucket, region, 
filename) expected_url =", "\"your-access-key\" integration.password = \"<PASSWORD>\" integration.setting( S3UploaderConfiguration.URL_TEMPLATE_KEY ).value = \"a transform\"", "\"final_mirror_url was called with bucket books-go, key here.epub\" == epub_rep.mirror_url", "\"dummy\", \"dummy\", \"https://dummy.s3.amazonaws.com/dummy\", None, ), ( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\",", "self._create_s3_uploader(region=region) data_source = DataSource.lookup(self._db, data_source_name) # Act result = uploader.cover_image_root(bucket,", "assert rep == upload.representation assert \"bucket\" == upload.bucket assert \"books.mrc\"", "2}, ], }, } ] == uploader.client.uploads def test_abort(self): uploader", "url_transform: uploader.url_transform = url_transform # Act result = uploader.final_mirror_url(bucket, key)", "assert result == expected_result @parameterized.expand( [ ( \"s3_path_style_request_without_region\", \"https://s3.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\",", "region=None, ): # identifier = self._identifier(foreign_id=\"ABOOK\") # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY", "settings = self._add_settings_value( settings, S3UploaderConfiguration.S3_REGION, region ) settings = self._add_settings_value(", "Exception(\"Error!\") pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url) def test_upload_part(self): uploader =", "= client_class.call_args_list[1].kwargs[\"aws_access_key_id\"] aws_secret_access_key = client_class.call_args_list[1].kwargs[ \"aws_secret_access_key\" ] assert service_name ==", "S3Uploader \"\"\" if settings and \"username\" not in settings: self._add_settings_value(", "S3UploaderConfiguration.URL_TEMPLATE_HTTP, ), ( \"https_url_template\", \"bucket\", \"the këy\", \"https://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTPS, ),", "( \"with_gutenberg_cover_generator_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/\", None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_custom_region\",", "), ( \"explicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, \"us-east-2\", ),", "edition.data_source, Representation.PNG_MEDIA_TYPE, content=content, ) cover_rep = cover.resource.representation assert None ==", "( \"http_url_with_escaped_symbols\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename with spaces!.jpg\"), ), ( \"http_url_with_escaped_symbols_but_unquote_set_to_false\",", "class TestMultiPartS3Upload(S3UploaderTest): def _representation(self): rep, ignore = create( self._db, Representation,", "upload.parts uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, upload.upload_part, \"Part 3\") def test_complete(self):", "== None assert aws_secret_access_key == None assert config.signature_version == botocore.UNSIGNED", "== MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] ) def test_instantiation(self): integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL", "= expiration_settings if expiration_settings else {} s3_uploader = self._create_s3_uploader(region=region, **settings)", "name, bucket, data_source_name, expected_result, 
scaled_size=None, region=None, ): # Arrange uploader", "assert \"a transform\" == uploader.url_transform @parameterized.expand( [ (\"empty_credentials\", None, None),", "0, 0, 0), \"us-east-2\", ), ( \"with_http_bucket_and_end_time_and_start_time\", \"http://marc\", \"SHORT\", \"Lane\",", "password != \"\" else None) assert \"config\" not in client_class.call_args_list[1].kwargs", "{\"ETag\": \"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ] ==", "wasn't informed of the irrelevant 'not-a-bucket-at-all' # setting. assert buckets", "result = uploader.final_mirror_url(bucket, key) # Assert if not url_transform: assert", "url=\"http://bucket/books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, ) return rep def test_init(self): uploader = self._create_s3_uploader(MockS3Client)", ") # Act result = uploader.url(bucket, path) # Assert assert", "0, 0, 0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0),", "assert [ {\"ETag\": \"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2},", "\"Bucket\": \"bucket\", \"Key\": \"books.mrc\", \"UploadId\": 1, \"MultipartUpload\": { \"Parts\": [", "S3Uploader :type uploader_class: Optional[Type] :param region: (Optional) S3 region :type", "uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) # Assert assert result == expected_result @parameterized.expand(", "1, 0, 0, 0), ), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\", \"SHORT\", \"Lane\",", "\"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", ), ( \"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\",", "SVG cover for the book. svg = \"\"\"<!DOCTYPE svg PUBLIC", "cover for the book. svg = \"\"\"<!DOCTYPE svg PUBLIC \"-//W3C//DTD", "to settings dictionary :param settings: Settings dictionary :type settings: Dict", "MockMultipartS3Upload.aborted = True rep, ignore = create( self._db, Representation, url=\"http://books.mrc\",", "None == epub_rep.mirror_exception # An S3 credential failure is treated", "Representation.SVG_MEDIA_TYPE, content=svg, ) # 'Upload' it to S3. s3 =", "(username if username != \"\" else None) assert aws_secret_access_key ==", "expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True, ): # Arrange identifier", "mock_final_mirror_url(bucket, key): return \"final_mirror_url was called with bucket %s, key", "a transient error. 
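    # Illustrative sketch (an assumption, not part of the original
    # suite): the escaped timestamps in the MARC URLs above are str()
    # output of timezone-aware datetimes run through URL quoting.
    @staticmethod
    def _example_marc_time_component():  # pragma: no cover
        from urllib.parse import quote

        time = datetime_utc(2020, 1, 1, 0, 0, 0)  # '2020-01-01 00:00:00+00:00'
        return quote(str(time))  # '2020-01-01%2000%3A00%3A00%2B00%3A00'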
    @parameterized.expand(
        [
            ("s3_path_style_request_without_region", "https://s3.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_path_style_request_with_region", "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_global_endpoint", "https://bucket.s3.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dashed_region", "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("s3_virtual_hosted_style_request_with_dotted_region", "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", ("bucket", "directory/filename.jpg")),
            ("http_url", "http://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg")),
            ("https_url", "https://book-covers.nypl.org/directory/filename.jpg", ("book-covers.nypl.org", "directory/filename.jpg")),
            ("http_url_with_escaped_symbols", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename with spaces!.jpg")),
            ("http_url_with_escaped_symbols_but_unquote_set_to_false", "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), False),
        ]
    )
    def test_split_url(self, name, url, expected_result, unquote=True):
        # Arrange
        s3_uploader = self._create_s3_uploader()

        # Act
        result = s3_uploader.split_url(url, unquote)

        # Assert
        assert result == expected_result

    def test_mirror_one(self):
        edition, pool = self._edition(with_license_pool=True)
        original_cover_location = "http://example.com/a-cover.png"
        content = open(self.sample_cover_path("test-book-cover.png"), "rb").read()
        cover, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original_cover_location,
            edition.data_source,
            Representation.PNG_MEDIA_TYPE,
            content=content,
        )
        cover_rep = cover.resource.representation
        assert None == cover_rep.mirrored_at

        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation
        assert None == epub_rep.mirrored_at

        s3 = self._create_s3_uploader(client_class=MockS3Client)

        # Mock final_mirror_url so we can verify that it's called with
        # the right arguments
        def mock_final_mirror_url(bucket, key):
            return "final_mirror_url was called with bucket %s, key %s" % (bucket, key)

        s3.final_mirror_url = mock_final_mirror_url

        book_url = "http://books-go/here.epub"
        cover_url = "http://covers-go/here.png"
        s3.mirror_one(cover.resource.representation, cover_url)
        s3.mirror_one(epub.resource.representation, book_url)
        [
            [data1, bucket1, key1, args1, ignore1],
            [data2, bucket2, key2, args2, ignore2],
        ] = s3.client.uploads

        # Both representations have had .mirror_url set and been
        # mirrored to those URLs.
        assert data1.startswith(b"\x89")
        assert "covers-go" == bucket1
        assert "here.png" == key1
        assert Representation.PNG_MEDIA_TYPE == args1["ContentType"]
        assert (utc_now() - cover_rep.mirrored_at).seconds < 10

        assert b"i'm an epub" == data2
        assert "books-go" == bucket2
        assert "here.epub" == key2
        assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"]
        assert (utc_now() - epub_rep.mirrored_at).seconds < 10

        # In both cases, mirror_url was set to the result of final_mirror_url.
        assert (
            "final_mirror_url was called with bucket books-go, key here.epub"
            == epub_rep.mirror_url
        )
        assert (
            "final_mirror_url was called with bucket covers-go, key here.png"
            == cover_rep.mirror_url
        )

        # mirrored-at was set when the representation was 'mirrored'
        for rep in epub_rep, cover_rep:
            assert (utc_now() - rep.mirrored_at).seconds < 10

    def test_mirror_failure(self):
        edition, pool = self._edition(with_license_pool=True)
        original_epub_location = "https://books.com/a-book.epub"
        epub, ignore = pool.add_link(
            Hyperlink.OPEN_ACCESS_DOWNLOAD,
            original_epub_location,
            edition.data_source,
            Representation.EPUB_MEDIA_TYPE,
            content="i'm an epub",
        )
        epub_rep = epub.resource.representation

        uploader = self._create_s3_uploader(MockS3Client)

        # A network failure is treated as a transient error.
        uploader.client.fail_with = BotoCoreError()
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # An S3 credential failure is treated as a transient error.
        response = dict(
            Error=dict(
                Code=401,
                Message="Bad credentials",
            )
        )
        uploader.client.fail_with = ClientError(response, "SomeOperation")
        uploader.mirror_one(epub_rep, self._url)
        assert None == epub_rep.mirrored_at
        assert None == epub_rep.mirror_exception

        # Because the file was not successfully uploaded,
        # final_mirror_url was never called and mirror_url was not set.
        assert None == epub_rep.mirror_url

        # A bug in the code is not treated as a transient error --
        # the exception propagates through.
        uploader.client.fail_with = Exception("crash!")
        pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url)
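    # Illustrative sketch (an assumption, not part of the original
    # suite): the ClientError shape used above -- botocore builds it
    # from an error-response dict plus an operation name; the operation
    # name here is arbitrary.
    @staticmethod
    def _example_client_error():  # pragma: no cover
        error = ClientError(dict(Error=dict(Code=401, Message="Bad credentials")), "PutObject")
        return error.response["Error"]["Code"]  # 401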
    def test_svg_mirroring(self):
        edition, pool = self._edition(with_license_pool=True)
        original = self._url

        # Create an SVG cover for the book.
        svg = """<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="50">
    <ellipse cx="50" cy="25" rx="50" ry="25" style="fill:blue;"/>
</svg>"""
        hyperlink, ignore = pool.add_link(
            Hyperlink.IMAGE,
            original,
            edition.data_source,
            Representation.SVG_MEDIA_TYPE,
            content=svg,
        )

        # 'Upload' it to S3.
        s3 = self._create_s3_uploader(MockS3Client)
        s3.mirror_one(hyperlink.resource.representation, self._url)
        [[data, bucket, key, args, ignore]] = s3.client.uploads

        assert Representation.SVG_MEDIA_TYPE == args["ContentType"]
        assert b"svg" in data
        assert b"PNG" not in data

    def test_multipart_upload(self):
        class MockMultipartS3Upload(MultipartS3Upload):
            completed = None
            aborted = None

            def __init__(self, uploader, representation, mirror_to):
                self.parts = []
                MockMultipartS3Upload.completed = False
                MockMultipartS3Upload.aborted = False

            def upload_part(self, content):
                self.parts.append(content)

            def complete(self):
                MockMultipartS3Upload.completed = True

            def abort(self):
                MockMultipartS3Upload.aborted = True

        rep, ignore = create(
            self._db,
            Representation,
            url="http://books.mrc",
            media_type=Representation.MARC_MEDIA_TYPE,
        )

        s3 = self._create_s3_uploader(MockS3Client)

        # Successful upload
        with s3.multipart_upload(rep, rep.url, upload_class=MockMultipartS3Upload) as upload:
            assert [] == upload.parts
            assert False == upload.completed
            assert False == upload.aborted

            upload.upload_part("Part 1")
            upload.upload_part("Part 2")

            assert ["Part 1", "Part 2"] == upload.parts

        assert True == MockMultipartS3Upload.completed
        assert False == MockMultipartS3Upload.aborted
        assert None == rep.mirror_exception

        class FailingMultipartS3Upload(MockMultipartS3Upload):
            def upload_part(self, content):
                raise Exception("Error!")

        # Failed during upload
        with s3.multipart_upload(rep, rep.url, upload_class=FailingMultipartS3Upload) as upload:
            upload.upload_part("Part 1")

        assert False == MockMultipartS3Upload.completed
        assert True == MockMultipartS3Upload.aborted
        assert "Error!" == rep.mirror_exception

        class AnotherFailingMultipartS3Upload(MockMultipartS3Upload):
            def complete(self):
                raise Exception("Error!")

        rep.mirror_exception = None
        # Failed during completion
        with s3.multipart_upload(rep, rep.url, upload_class=AnotherFailingMultipartS3Upload) as upload:
            upload.upload_part("Part 1")

        assert False == MockMultipartS3Upload.completed
        assert True == MockMultipartS3Upload.aborted
        assert "Error!" == rep.mirror_exception
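    # Illustrative sketch (an assumption, not the project's
    # MultipartS3Upload): the underlying boto3 calls a multipart upload
    # wraps -- create, upload parts, then complete with the collected
    # ETags. Real S3 requires all but the last part to be at least 5 MB.
    @staticmethod
    def _example_boto3_multipart(client, bucket="a-bucket", key="books.mrc"):  # pragma: no cover
        upload = client.create_multipart_upload(Bucket=bucket, Key=key)
        part = client.upload_part(
            Bucket=bucket, Key=key, UploadId=upload["UploadId"], PartNumber=1, Body=b"Part 1"
        )
        client.complete_multipart_upload(
            Bucket=bucket,
            Key=key,
            UploadId=upload["UploadId"],
            MultipartUpload={"Parts": [{"ETag": part["ETag"], "PartNumber": 1}]},
        )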
    @parameterized.expand(
        [
            ("default_expiration_parameter", None, int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION)),
            ("custom_expiration_parameter", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100),
        ]
    )
    def test_sign_url(self, name, expiration_settings, expected_expiration):
        # Arrange
        region = "us-east-1"
        bucket = "bucket"
        filename = "filename"
        url = "https://{0}.s3.{1}.amazonaws.com/{2}".format(bucket, region, filename)
        expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S"
        settings = expiration_settings if expiration_settings else {}
        s3_uploader = self._create_s3_uploader(region=region, **settings)
        s3_uploader.split_url = MagicMock(return_value=(bucket, filename))
        s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url)

        # Act
        result = s3_uploader.sign_url(url)

        # Assert
        assert result == expected_url
        s3_uploader.split_url.assert_called_once_with(url)
        s3_uploader.client.generate_presigned_url.assert_called_once_with(
            "get_object",
            ExpiresIn=expected_expiration,
            Params={"Bucket": bucket, "Key": filename},
        )
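    # Illustrative sketch (an assumption, not part of the original
    # suite): the real boto3 call that test_sign_url mocks out above;
    # a usable URL requires valid credentials on the client.
    @staticmethod
    def _example_presigned_url(client, bucket="a-bucket", key="a-key", expires_in=3600):  # pragma: no cover
        return client.generate_presigned_url(
            "get_object", Params={"Bucket": bucket, "Key": key}, ExpiresIn=expires_in
        )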
assert ( \"final_mirror_url was called with", "[] MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted = False def upload_part(self, content):", "# Successful upload with s3.multipart_upload( rep, rep.url, upload_class=MockMultipartS3Upload ) as", "] ) def test_cover_image_root( self, name, bucket, data_source_name, expected_result, scaled_size=None,", "= self._url # Create an SVG cover for the book.", "# Assert assert result == expected_result @parameterized.expand( [ ( \"with_s3_bucket_and_end_time\",", "Dict \"\"\" if value: if settings: settings[key] = value else:", "key\", \"https://bucket.s3.amazonaws.com/the%20key\", ), ( \"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None,", "\"test-bucket\", False, ), ( \"using_minio_uploader_and_open_access_bucket\", MinIOUploader, S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, {", "\"aws_secret_access_key\" ] assert service_name == \"s3\" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION", "\"https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/\", ), ( \"with_overdrive_data_source_and_scaled_size\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/\", 300, ), (", "\"filename\", \"https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, ), ( \"with_scaled_size_and_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\",", ") as upload: upload.upload_part(\"Part 1\") assert False == MockMultipartS3Upload.completed assert", "= uploader.content_root(bucket) # Assert assert result == expected_result @parameterized.expand( [", "= \"value\" uploader = self._create_s3_uploader(**buckets_plus_irrelevant_setting) # This S3Uploader knows about", "Representation, url=\"http://bucket/books.mrc\", media_type=Representation.MARC_MEDIA_TYPE, ) return rep def test_init(self): uploader =", "transform\" uploader = MirrorUploader.implementation(integration) assert True == isinstance(uploader, S3Uploader) #", "self._edition(with_license_pool=True) original_cover_location = \"http://example.com/a-cover.png\" content = open(self.sample_cover_path(\"test-book-cover.png\"), \"rb\").read() cover, ignore", "# Assert if not url_transform: assert ( S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform", "data_source_name) identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) # Act", "( \"implicit_s3_url_template_with_custom_region\", \"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None, \"us-east-2\", ), (", "s3.client.uploads assert Representation.SVG_MEDIA_TYPE == args[\"ContentType\"] assert b\"svg\" in data assert", "\"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ), ] ) def test_cover_image_root( self, name,", "test_instantiation(self): integration = self._external_integration( ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL ) integration.username = \"your-access-key\"", "MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception class", "\"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\", \"/a-path\", \"https://a-bucket.com/a-path\", None, ), ] ) def test_url(", "simple S3 integration.\"\"\" 
integration = self._external_integration(
    ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
)
data_source = DataSource.lookup(self._db, data_source_name)
parameters["data_source"] = data_source
s3 = self._create_s3_uploader(MockS3Client)
s3.mirror_one(hyperlink.resource.representation, self._url)
[[data, bucket, key, args, ignore]] = s3.client.uploads
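# For context, a minimal sketch of the fake-client shape the unpacking
# above relies on: every upload is recorded as [content, bucket, key,
# extra_args, ignore]. This is an assumed stand-in, not the real
# MockS3Client.
class FakeS3Client:
    def __init__(self):
        self.uploads = []
        self.fail_with = None

    def upload_fileobj(self, fileobj, bucket, key, ExtraArgs=None, **kwargs):
        if self.fail_with is not None:
            raise self.fail_with
        self.uploads.append([fileobj.read(), bucket, key, ExtraArgs, kwargs])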
MinIOUploader,
S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY,
"test-bucket",
False,
{
    MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL
},
DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\",", "epub\" == data2 assert \"books-go\" == bucket2 assert \"here.epub\" ==", "= uploader.get_bucket(\"foo\") assert uploader.buckets[\"foo\"] == result @parameterized.expand( [ ( \"s3_url_with_path_without_slash\",", "MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ] ) def test_mirror( self, name,", "**buckets) # Act result = uploader.marc_file_url(library, lane, end_time, start_time) #", "def test_mirror_failure(self): edition, pool = self._edition(with_license_pool=True) original_epub_location = \"https://books.com/a-book.epub\" epub,", "key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None, \"us-east-2\", ), ( \"explicit_s3_url_template\", \"bucket\", \"the key\",", "{S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\", ), ( \"with_custom_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"},", "Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets = { bucket_type: bucket_name, }", "args[\"ContentType\"] assert b\"svg\" in data assert b\"PNG\" not in data", "value to settings dictionary :param settings: Settings dictionary :type settings:", "data2 assert \"books-go\" == bucket2 assert \"here.epub\" == key2 assert", "këy\", \"http://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTP, ), ( \"https_url_template\", \"bucket\", \"the këy\", \"https://bucket/the%20k%C3%ABy\",", "# identifier = self._identifier(foreign_id=\"ABOOK\") # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit, identifier, \"filename\",", "), ] ) def test_sign_url(self, name, expiration_settings, expected_expiration): # Arrange", "uploader.final_mirror_url(bucket, key) # Assert if not url_transform: assert ( S3UploaderConfiguration.URL_TEMPLATE_DEFAULT", "instead of boto3's client class :type client_class: Optional[Type] :param: uploader_class:", "upload.upload_part(\"Part 1\") assert False == MockMultipartS3Upload.completed assert True == MockMultipartS3Upload.aborted", "a value to settings dictionary :param settings: Settings dictionary :type", "\"PartNumber\": 2}, ] == upload.parts uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, upload.upload_part,", "SIMPLIFIED_TEST_MINIO_USER = os.environ.get( \"SIMPLIFIED_TEST_MINIO_USER\", \"minioadmin\" ) SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get( \"SIMPLIFIED_TEST_MINIO_PASSWORD\",", "in settings: self._add_settings_value( settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class:", "# Arrange identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) parameters", "cy=\"25\" rx=\"50\" ry=\"25\" style=\"fill:blue;\"/> </svg>\"\"\" hyperlink, ignore = pool.add_link( Hyperlink.IMAGE,", "Hyperlink, Identifier, Representation, create, ) from ..s3 import ( MinIOUploader,", "1, 1, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), ( \"with_s3_bucket_and_end_time_and_start_time\", \"marc\",", "extension: parameters[\"extension\"] = extension if title: parameters[\"title\"] = title if", "\"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), \"us-east-2\", ), (", "and configure 
a simple S3 integration."""
integration = self._external_integration(
    ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings
)
Flügelmaus+.epub\"] assert ( \"Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub\" == S3Uploader.key_join(parts) ) @parameterized.expand( [", "= self._create_s3_uploader(region=region, **buckets) # Act result = uploader.marc_file_url(library, lane, end_time,", "original, edition.data_source, Representation.SVG_MEDIA_TYPE, content=svg, ) # 'Upload' it to S3.", "content=\"i'm an epub\", ) epub_rep = epub.resource.representation assert None ==", "\"us-east-2\", ), ( \"explicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, ),", "@parameterized.expand( [ ( \"default_expiration_parameter\", None, int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), ), ( \"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION:", "parameterized import parameterized from ..mirror import MirrorUploader from ..model import", "cover_rep: assert (utc_now() - rep.mirrored_at).seconds < 10 def test_mirror_failure(self): edition,", "== MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception @parameterized.expand( [ ( \"default_expiration_parameter\",", "self._edition(with_license_pool=True) original_epub_location = \"https://books.com/a-book.epub\" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location,", "}, ), ( \"using_minio_uploader_and_protected_access_bucket\", MinIOUploader, S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\", False, { MinIOUploaderConfiguration.ENDPOINT_URL:", "uploader_class: Optional[Type] :param region: (Optional) S3 region :type region: Optional[string]", "..model import ( DataSource, ExternalIntegration, Hyperlink, Identifier, Representation, create, )", "\"us-east-3\", ), ( \"with_overdrive_data_source_and_scaled_size_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/\", 300, \"us-east-3\", ),", "can verify that it's called with # the right arguments", "1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ] == upload.parts uploader.client.fail_with =", "\"a-path\", \"http://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_with_slash\", \"http://a-bucket.com/\", \"/a-path\", \"http://a-bucket.com/a-path\", None,", "network failure is treated as a transient error. uploader.client.fail_with =", "\"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", ), ( \"with_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:", "datetime_utc(2020, 1, 1, 0, 0, 0), ), ( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\",", ":param region: (Optional) S3 region :type region: Optional[string] :param addressing_style:", "= \"a transform\" uploader = MirrorUploader.implementation(integration) assert True == isinstance(uploader,", "result of final_mirror_url. 
assert ( \"final_mirror_url was called with bucket", "PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse", "DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/\", None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\",", "data_source_name) # Act result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) # Assert", "S3AddressingStyle.PATH.value, ), ( \"custom_http_url_and_path_without_slash\", \"http://a-bucket.com/\", \"a-path\", \"http://a-bucket.com/a-path\", None, ), (", "test_svg_mirroring(self): edition, pool = self._edition(with_license_pool=True) original = self._url # Create", "= \"your-access-key\" integration.password = \"<PASSWORD>\" integration.setting( S3UploaderConfiguration.URL_TEMPLATE_KEY ).value = \"a", "same as its # key in the MirrorUploader implementation registry,", "value: if settings: settings[key] = value else: settings = {key:", "MultipartS3Upload, uploader, rep, rep.url) def test_upload_part(self): uploader = self._create_s3_uploader(MockS3Client) rep", "by removing all the buckets from MinIO\"\"\" super(S3UploaderTest, self).teardown_method() response", "content = open(self.sample_cover_path(\"test-book-cover.png\"), \"rb\").read() cover, ignore = pool.add_link( Hyperlink.IMAGE, original_cover_location,", "0, 0), \"us-east-2\", ), ( \"with_http_bucket_and_end_time_and_start_time\", \"http://marc\", \"SHORT\", \"Lane\", datetime_utc(2020,", "\"Part 1\", \"UploadId\": 1, \"PartNumber\": 1, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\",", "\"s3_virtual_hosted_style_request_with_dotted_region\", \"https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"http_url\", \"http://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"),", "\"us-east-3\", ), ] ) def test_content_root(self, name, bucket, expected_result, region=None):", "**settings ) class TestS3Uploader(S3UploaderTest): def test_names(self): # The NAME associated", "edition, pool = self._edition(with_license_pool=True) original_epub_location = \"https://books.com/a-book.epub\" epub, ignore =", "filename, scaled_size=scaled_size ) # Assert assert result == expected_result @parameterized.expand(", "aws_secret_access_key = client_class.call_args_list[0].kwargs[ \"aws_secret_access_key\" ] config = client_class.call_args_list[0].kwargs[\"config\"] assert service_name", "def _add_settings_value(self, settings, key, value): \"\"\"Adds a value to settings", "settings=settings ) integration.username = settings.get(\"username\", \"username\") integration.password = settings.get(\"password\", \"password\")", "# better if it's the same as the name of", "settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style ) integration = self._integration(**settings) uploader_class = uploader_class", "name, uploader_class, bucket_type, bucket_name, open_access, settings=None ): # Arrange book_title", "Arrange uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act result", "settings dictionary :rtype: Dict \"\"\" if value: if settings: settings[key]", "assert aws_secret_access_key == None assert 
config.signature_version == botocore.UNSIGNED
assert (
    config.s3["addressing_style"]
    == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE
)
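# A minimal sketch of the anonymous-client configuration asserted above:
# with empty credentials, boto3 can be told to skip request signing
# entirely. The region and addressing-style values here are placeholders.
import boto3
import botocore
from botocore.client import Config

anonymous_s3 = boto3.client(
    "s3",
    region_name="us-east-1",
    config=Config(
        signature_version=botocore.UNSIGNED,
        s3={"addressing_style": "virtual"},
    ),
)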
svg = """<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" width="100" height="50">
    <ellipse cx="50" cy="25" rx="50" ry="25" style="fill:blue;"/>
</svg>"""
\"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY:", "endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, ) def teardown_method(self): \"\"\"Deinitializes the test suite by removing", "# the exception propagates through. uploader.client.fail_with = Exception(\"crash!\") pytest.raises(Exception, uploader.mirror_one,", "def setup_class(cls): \"\"\"Initializes the test suite by creating a boto3", "class to be used instead of boto3's client class :type", "\"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1, 0, 0, 0), ), ( \"with_https_bucket_and_end_time_and_start_time\",", "set and been # mirrored to those URLs. assert data1.startswith(b\"\\x89\")", "\"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601, \"us-east-3\", ), ] ) def test_cover_image_url( self,", "# Act result = uploader.url(bucket, path) # Assert assert result", "), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On", "# Assert assert result == expected_result @parameterized.expand( [ ( \"with_identifier\",", "with s3.multipart_upload( rep, rep.url, upload_class=FailingMultipartS3Upload ) as upload: upload.upload_part(\"Part 1\")", "representation, mirror_to): self.parts = [] MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted =", "credential failure is treated as a transient error. response =", "def test_cover_image_url( self, name, buckets, data_source_name, identifier, filename, expected_result, scaled_size=None,", "\"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 1, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\",", "S3 credential failure is treated as a transient error. 
response = dict(
    Error=dict(
        Code=401,
        Message="Bad credentials",
    )
)
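# For reference, botocore's ClientError is constructed from exactly this
# kind of error-response dictionary plus an operation name; the operation
# name below is a placeholder, and the dict is the `response` built above.
from botocore.exceptions import ClientError

try:
    raise ClientError(response, "PutObject")
except ClientError as error:
    assert error.response["Error"]["Code"] == 401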
assert None == epub_rep.mirror_url

# A bug in the code is not treated as a transient error --
# the exception propagates through.
assert data1.startswith(b\"\\x89\") assert \"covers-go\" == bucket1", "] ) def test_url( self, name, bucket, path, expected_result, region=None,", "== expected_result @parameterized.expand( [ ( \"with_identifier\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub\",", "Books\", \"us-east-3\", False, ), ] ) def test_book_url( self, name,", "{ MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ] ) def test_mirror( self,", "MinIO credentials\"\"\" super(S3UploaderIntegrationTest, cls).setup_class() cls.minio_s3_client = boto3.client( \"s3\", aws_access_key_id=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER, aws_secret_access_key=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD,", "pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url) def test_svg_mirroring(self): edition, pool = self._edition(with_license_pool=True)", "aws_access_key_id == None assert aws_secret_access_key == None assert config.signature_version ==", "( \"custom_http_url_and_path_with_slash\", \"http://a-bucket.com/\", \"/a-path\", \"http://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_without_slash\", \"https://a-bucket.com/\",", "s3_uploader.split_url = MagicMock(return_value=(bucket, filename)) s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url) # Act result", "# Because the file was not successfully uploaded, # final_mirror_url", "\"us-east-2\", S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.us-east-3.amazonaws.com/a-path\", \"us-east-3\", ),", "\"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", False, ), ]", "{\"ETag\": \"etag\", \"PartNumber\": 2}, ] == upload.parts uploader.client.fail_with = Exception(\"Error!\")", "MinIOUploader, S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), (", "DataSource.lookup(self._db, data_source_name) identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) #", "Updated settings dictionary :rtype: Dict \"\"\" if value: if settings:", "s3_uploader.client.generate_presigned_url.assert_called_once_with( \"get_object\", ExpiresIn=expected_expiration, Params={\"Bucket\": bucket, \"Key\": filename}, ) class TestMultiPartS3Upload(S3UploaderTest):", "), ] ) def test_mirror( self, name, uploader_class, bucket_type, bucket_name,", "\"dummy\", \"dummy\", \"https://s3.amazonaws.com/dummy/dummy\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_path_with_slash\", \"a-bucket\", \"/a-path\",", "\"books.mrc\" == upload.filename assert 1 == upload.part_number assert [] ==", "if settings: settings.update(buckets) else: settings = buckets s3_uploader = self._create_s3_uploader(", ") def test_url( self, name, bucket, path, expected_result, region=None, addressing_style=None", "# Arrange s3_uploader = self._create_s3_uploader() # Act result = s3_uploader.split_url(url,", "MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.abort() assert []", "\"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", 
\"https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", ), ( \"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"},", "S3UploaderIntegrationTest(S3UploaderTest): SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get( \"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER = os.environ.get(", "Custom class to be used instead of boto3's client class", "style :type addressing_style: Optional[string] :param settings: Kwargs used for initializing", "), ( \"with_scaled_size_and_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename\", 601,", "( \"s3_virtual_hosted_style_request_with_dotted_region\", \"https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"http_url\", \"http://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\",", "\"http://a-bucket.com/\", \"a-path\", \"http://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_with_slash\", \"http://a-bucket.com/\", \"/a-path\", \"http://a-bucket.com/a-path\",", "( \"final_mirror_url was called with bucket covers-go, key here.png\" ==", ") class TestS3Uploader(S3UploaderTest): def test_names(self): # The NAME associated with", "), ] ) def test_content_root(self, name, bucket, expected_result, region=None): #", "self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) assert uploader == upload.uploader", "\"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020,", "\"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER = os.environ.get( \"SIMPLIFIED_TEST_MINIO_USER\", \"minioadmin\" ) SIMPLIFIED_TEST_MINIO_PASSWORD", "# Failed during completion with s3.multipart_upload( rep, rep.url, upload_class=AnotherFailingMultipartS3Upload )", "mirror_to): self.parts = [] MockMultipartS3Upload.completed = False MockMultipartS3Upload.aborted = False", "TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand( [ ( \"using_s3_uploader_and_open_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY,", "= epub.resource.representation assert None == epub_rep.mirrored_at s3 = self._create_s3_uploader(client_class=MockS3Client) #", "\"https_url_template\", \"bucket\", \"the këy\", \"https://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTPS, ), ] ) def", "= False def upload_part(self, content): self.parts.append(content) def complete(self): MockMultipartS3Upload.completed =", "test_cover_image_url( self, name, buckets, data_source_name, identifier, filename, expected_result, scaled_size=None, region=None,", "0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), ( \"with_s3_bucket_and_end_time_and_start_time\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1,", "\"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\", DataSource.UNGLUE_IT, \"On Books\", \"us-east-3\", ), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\", 
{S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"},
epub_rep.mirror_exception

# Because the file was not successfully uploaded,
# final_mirror_url was never called and mirror_url was
# never set.
'not-a-bucket-at-all' # setting. assert buckets == uploader.buckets # get_bucket", "return rep def test_init(self): uploader = self._create_s3_uploader(MockS3Client) rep = self._representation()", "), ] ) def test_split_url(self, name, url, expected_result, unquote=True): #", "S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ] ) def test_mirror( self, name, uploader_class,", "@parameterized.expand( [ ( \"using_s3_uploader_and_open_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\",", "None, ), ( \"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\", \"/a-path\", \"https://a-bucket.com/a-path\", None, ), ]", "), ( \"s3_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://a-bucket.s3.us-east-3.amazonaws.com/a-path\", \"us-east-3\", ), ( \"s3_path_style_url_with_custom_region_and_path_with_slash\",", "identifier, expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True, ): # Arrange", "@parameterized.expand( [ ( \"without_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\",", "(utc_now() - cover_rep.mirrored_at).seconds < 10 assert b\"i'm an epub\" ==", "{S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\", None, None, \"On Books\", ), (", "S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name = client_class.call_args_list[1].args[0] region_name = client_class.call_args_list[1].kwargs[\"region_name\"] aws_access_key_id =", "True == isinstance(uploader, S3Uploader) # The URL_TEMPLATE_KEY setting becomes the", "\"https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"http_url\", \"http://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ),", "through. 
uploader.client.fail_with = Exception(\"crash!\") pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url) def test_svg_mirroring(self):", "in epub_rep, cover_rep: assert (utc_now() - rep.mirrored_at).seconds < 10 def", "s3_uploader.mirror_one(representation, book_url) # Assert response = self.minio_s3_client.list_objects(Bucket=bucket_name) assert \"Contents\" in", "and \"password\" not in settings: self._add_settings_value( settings, \"password\", self.SIMPLIFIED_TEST_MINIO_PASSWORD )", "\"us-east-3\", ), ( \"with_overdrive_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\", None, \"us-east-3\", ),", "else: settings = buckets s3_uploader = self._create_s3_uploader( uploader_class=uploader_class, **settings )", "when the representation was 'mirrored' for rep in epub_rep, cover_rep:", "\"using_s3_uploader_and_open_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, ), (", "], }, } ] == uploader.client.uploads def test_abort(self): uploader =", ") _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit( SIMPLIFIED_TEST_MINIO_ENDPOINT_URL )", "\"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\", \"us-east-2\", ), ( \"s3_path_style_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.us-east-2.amazonaws.com/a-bucket/a-path\", \"us-east-2\", S3AddressingStyle.PATH.value,", "Optional[Dict] :return: New intance of S3 uploader :rtype: S3Uploader \"\"\"", "assert \"config\" not in client_class.call_args_list[1].kwargs def test_custom_client_class(self): \"\"\"You can specify", "int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), ), ( \"empty_expiration_parameter\", {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, 100, ), ] )", "expiration_settings if expiration_settings else {} s3_uploader = self._create_s3_uploader(region=region, **settings) s3_uploader.split_url", "not in data def test_multipart_upload(self): class MockMultipartS3Upload(MultipartS3Upload): completed = None", "# Arrange uploader = self._create_s3_uploader(region=region) data_source = DataSource.lookup(self._db, data_source_name) #", "settings: Settings dictionary :type settings: Dict :param key: Key :type", "result = uploader.content_root(bucket) # Assert assert result == expected_result @parameterized.expand(", "# Assert assert result == expected_result @parameterized.expand( [ ( \"implicit_s3_url_template\",", "result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) # Assert assert result ==", "\"s3\" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None assert", "response = self.minio_s3_client.list_objects(Bucket=bucket_name) for object in response.get(\"Contents\", []): object_key =", "# Arrange library = self._library(short_name=library_name) lane = self._lane(display_name=lane_name) buckets =", "), ] ) def test_final_mirror_url( self, name, bucket, key, expected_result,", "= uploader.marc_file_root(bucket, library) # Assert assert result == expected_result @parameterized.expand(", "response assert len(response[\"Contents\"]) == 1 [object] = response[\"Contents\"] assert object[\"Key\"]", "None == cover_rep.mirrored_at original_epub_location = \"https://books.com/a-book.epub\" epub, ignore = pool.add_link(", "self._create_s3_uploader(region=region) 
library = self._library(short_name=library_name) # Act result = uploader.marc_file_root(bucket, library)", "if username != \"\" else None) assert aws_secret_access_key == (password", "original_epub_location = \"https://books.com/a-book.epub\" epub, ignore = pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source,", "( \"with_overdrive_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\", None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_scaled_size_and_custom_region\",", "!= \"\" else None) assert \"config\" not in client_class.call_args_list[1].kwargs def", "), ( \"s3_path_style_url_with_custom_region_and_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.us-east-3.amazonaws.com/a-bucket/a-path\", \"us-east-3\", S3AddressingStyle.PATH.value, ), (", "is treated as a transient error. response = dict( Error=dict(", "Message=\"Bad credentials\", ) ) uploader.client.fail_with = ClientError(response, \"SomeOperation\") uploader.mirror_one(epub_rep, self._url)", "import MirrorUploader from ..model import ( DataSource, ExternalIntegration, Hyperlink, Identifier,", "= self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) parameters = {\"identifier\": identifier,", "] ) def test_cover_image_url( self, name, buckets, data_source_name, identifier, filename,", "= MultipartS3Upload(uploader, rep, rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.abort() assert", "assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None assert aws_secret_access_key", "attribute on the S3Uploader object. assert \"a transform\" == uploader.url_transform", "[ {\"ETag\": \"etag\", \"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ]", "and it's # better if it's the same as the", ") minio_s3_client = None \"\"\"boto3 client connected to locally running", "scaled_size=scaled_size) # Assert assert result == expected_result @parameterized.expand( [ (", "), ( \"with_s3_bucket_and_end_time_and_start_time_and_custom_region\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0,", "had .mirror_url set and been # mirrored to those URLs.", "those URLs. assert data1.startswith(b\"\\x89\") assert \"covers-go\" == bucket1 assert \"here.png\"", "pytest.raises(Exception, upload.upload_part, \"Part 3\") def test_complete(self): uploader = self._create_s3_uploader(MockS3Client) rep", "is treated as a transient error. uploader.client.fail_with = BotoCoreError() uploader.mirror_one(epub_rep,", "# unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) # identifier = self._identifier(foreign_id=\"ABOOK\") #", "mirror_url was set to the result of final_mirror_url. 
assert (", "object_key = object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def _create_s3_uploader( self, client_class=None,", "\"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \"pdf\", ), ( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\",", "Assert assert result == expected_result def test_mirror_one(self): edition, pool =", "in response.get(\"Contents\", []): object_key = object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def", "os.environ.get( \"SIMPLIFIED_TEST_MINIO_ENDPOINT_URL\", \"http://localhost:9000\" ) SIMPLIFIED_TEST_MINIO_USER = os.environ.get( \"SIMPLIFIED_TEST_MINIO_USER\", \"minioadmin\" )", "\"a transform\" == uploader.url_transform @parameterized.expand( [ (\"empty_credentials\", None, None), (\"empty_string_credentials\",", "MockS3Client) assert isinstance(uploader.client, MockS3Client) def test_get_bucket(self): buckets = { S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:", "= { bucket_type: bucket_name, } if settings: settings.update(buckets) else: settings", "# The NAME associated with this class must be the", "\"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\", \"https://bucket.s3.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"),", "= buckets s3_uploader = self._create_s3_uploader( uploader_class=uploader_class, **settings ) self.minio_s3_client.create_bucket(Bucket=bucket_name) #", "\"test-bucket\", True, ), ( \"using_s3_uploader_and_protected_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY,", "\"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ), ( \"with_custom_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/\", \"us-east-3\", ), ]", "of final_mirror_url. assert ( \"final_mirror_url was called with bucket books-go,", "\"UploadId\": 1, \"MultipartUpload\": { \"Parts\": [ {\"ETag\": \"etag\", \"PartNumber\": 1},", "# integration. 
assert S3Uploader.NAME == ExternalIntegration.S3 assert ( S3Uploader ==", "if settings and \"password\" not in settings: self._add_settings_value( settings, \"password\",", "as its # key in the MirrorUploader implementation registry, and", "[ ( \"with_identifier\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub\", ), ( \"with_custom_extension\",", "S3UploaderConfiguration.URL_TEMPLATE_HTTPS, ), ] ) def test_final_mirror_url( self, name, bucket, key,", "), ] ) def test_url( self, name, bucket, path, expected_result,", "uploader.content_root(bucket) # Assert assert result == expected_result @parameterized.expand( [ (", "def complete(self): raise Exception(\"Error!\") rep.mirror_exception = None # Failed during", "identifier = self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) parameters = {\"identifier\":", "FailingMultipartS3Upload(MockMultipartS3Upload): def upload_part(self, content): raise Exception(\"Error!\") # Failed during upload", "[ ( \"with_default_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ), ( \"with_custom_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/\",", "Assert assert client_class.call_count == 2 service_name = client_class.call_args_list[0].args[0] region_name =", "book_url = \"http://books-go/here.epub\" cover_url = \"http://s3.amazonaws.com/covers-go/here.png\" s3.mirror_one(cover.resource.representation, cover_url) s3.mirror_one(epub.resource.representation, book_url)", "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL ) minio_s3_client = None \"\"\"boto3 client connected to locally", "integration.username = settings.get(\"username\", \"username\") integration.password = settings.get(\"password\", \"password\") return integration", "MockMultipartS3Upload(MultipartS3Upload): completed = None aborted = None def __init__(self, uploader,", ") service_name = client_class.call_args_list[1].args[0] region_name = client_class.call_args_list[1].kwargs[\"region_name\"] aws_access_key_id = client_class.call_args_list[1].kwargs[\"aws_access_key_id\"]", "Assert assert result == expected_result @parameterized.expand( [ ( \"with_identifier\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY:", "Optional[string] :param settings: Kwargs used for initializing an external integration", "_, _, _ = urlsplit( SIMPLIFIED_TEST_MINIO_ENDPOINT_URL ) minio_s3_client = None", "Representation.SVG_MEDIA_TYPE == args[\"ContentType\"] assert b\"svg\" in data assert b\"PNG\" not", "setup_class(cls): \"\"\"Initializes the test suite by creating a boto3 client", "0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), ( \"with_s3_bucket_and_end_time_and_start_time\", \"marc\", \"SHORT\", \"Lane\", datetime_utc(2020,", "result = uploader.get_bucket(\"foo\") assert uploader.buckets[\"foo\"] == result @parameterized.expand( [ (", "== MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self):", "\"directory/filename+with+spaces%21.jpg\"), False, ), ] ) def test_split_url(self, name, url, expected_result,", "class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): raise Exception(\"Error!\") rep.mirror_exception = None #", "SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg 
xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse cx=\"50\" cy=\"25\"", "creating a boto3 client set up with MinIO credentials\"\"\" super(S3UploaderIntegrationTest,", "goal=ExternalIntegration.STORAGE_GOAL ) integration.username = \"your-access-key\" integration.password = \"<PASSWORD>\" integration.setting( S3UploaderConfiguration.URL_TEMPLATE_KEY", "\"On Books\", \"us-east-3\", ), ( \"with_protected_access_and_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\",", "== upload.aborted upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") assert [\"Part 1\", \"Part", "= uploader.book_url(**parameters) # Assert assert result == expected_result @parameterized.expand( [", ":return: New intance of S3 uploader :rtype: S3Uploader \"\"\" if", "= True rep, ignore = create( self._db, Representation, url=\"http://books.mrc\", media_type=Representation.MARC_MEDIA_TYPE,", ") from ..s3 import ( MinIOUploader, MinIOUploaderConfiguration, MockS3Client, MultipartS3Upload, S3AddressingStyle,", "def test_names(self): # The NAME associated with this class must", "None == epub_rep.mirrored_at assert None == epub_rep.mirror_exception # Because the", "client_class: client_class = self.s3_client_class return super(S3UploaderIntegrationTest, self)._create_s3_uploader( client_class, uploader_class, region,", "region=region, addressing_style=addressing_style ) # Act result = uploader.url(bucket, path) #", "}, { \"Body\": \"Part 2\", \"UploadId\": 1, \"PartNumber\": 2, \"Bucket\":", "\"get_object\", ExpiresIn=expected_expiration, Params={\"Bucket\": bucket, \"Key\": filename}, ) class TestMultiPartS3Upload(S3UploaderTest): def", "settings.get(\"password\", \"password\") return integration def _add_settings_value(self, settings, key, value): \"\"\"Adds", ":param value: Value :type value: Any :return: Updated settings dictionary", "instead of boto3.client.\"\"\" integration = self._integration() uploader = S3Uploader(integration, MockS3Client)", "'not-a-bucket-at-all' # setting. 
assert buckets == uploader.buckets # get_bucket just", "\"directory/filename.jpg\"), ), ( \"s3_path_style_request_with_region\", \"https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_global_endpoint\",", "cover, ignore = pool.add_link( Hyperlink.IMAGE, original_cover_location, edition.data_source, Representation.PNG_MEDIA_TYPE, content=content, )", "S3Uploader) # The URL_TEMPLATE_KEY setting becomes the .url_transform # attribute", "\"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://a-bucket.s3.us-east-2.amazonaws.com/a-path\", \"us-east-2\",", "settings=None ): # Arrange book_title = \"1234567890\" book_content = \"1234567890\"", "1\") upload.upload_part(\"Part 2\") upload.complete() assert [ { \"Bucket\": \"bucket\", \"Key\":", "== S3Uploader.key_join(parts) ) @parameterized.expand( [ ( \"with_gutenberg_cover_generator_data_source\", \"test-book-covers-s3-bucket\", DataSource.GUTENBERG_COVER_GENERATOR, \"https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/\",", "== upload.parts uploader.client.fail_with = Exception(\"Error!\") pytest.raises(Exception, upload.upload_part, \"Part 3\") def", "{S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \"pdf\", ), ( \"with_custom_dotted_extension\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"},", "upload.uploader assert rep == upload.representation assert \"bucket\" == upload.bucket assert", ") epub_rep = epub.resource.representation uploader = self._create_s3_uploader(MockS3Client) # A network", "\"https://a-bucket.com/a-path\", None, ), ( \"custom_http_url_and_path_with_slash\", \"https://a-bucket.com/\", \"/a-path\", \"https://a-bucket.com/a-path\", None, ),", "upload: assert [] == upload.parts assert False == upload.completed assert", "== (password if password != \"\" else None) assert \"config\"", "\"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_path_style_dummy_url_with_path_without_slash\", \"dummy\", \"dummy\",", "pool.add_link( Hyperlink.OPEN_ACCESS_DOWNLOAD, original_epub_location, edition.data_source, Representation.EPUB_MEDIA_TYPE, content=\"i'm an epub\", ) epub_rep", "[] == uploader.client.parts @pytest.mark.minio class TestS3UploaderIntegration(S3UploaderIntegrationTest): @parameterized.expand( [ ( \"using_s3_uploader_and_open_access_bucket\",", "= None aborted = None def __init__(self, uploader, representation, mirror_to):", "True, ), ( \"using_s3_uploader_and_protected_access_bucket\", functools.partial( S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, \"test-bucket\",", "data_source, identifier, filename, scaled_size=scaled_size ) # Assert assert result ==", "region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None assert aws_secret_access_key ==", "bucket covers-go, key here.png\" == cover_rep.mirror_url ) # mirrored-at was", "== \"s3\" assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None", ":rtype: Dict \"\"\" if value: if settings: settings[key] = value", "name, bucket, library_name, lane_name, end_time, expected_result, start_time=None, region=None, ): #", 
"the book. svg = \"\"\"<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\"", "cls).setup_class() cls.minio_s3_client = boto3.client( \"s3\", aws_access_key_id=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_USER, aws_secret_access_key=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_PASSWORD, endpoint_url=TestS3UploaderIntegration.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, ) cls.s3_client_class", "epub_rep = epub.resource.representation uploader = self._create_s3_uploader(MockS3Client) # A network failure", "\"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ), ( \"with_custom_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/\", \"us-east-3\", ), ] )", "parameters[\"data_source\"] = data_source # Act result = uploader.book_url(**parameters) # Assert", "pool = self._edition(with_license_pool=True) original_cover_location = \"http://example.com/a-cover.png\" content = open(self.sample_cover_path(\"test-book-cover.png\"), \"rb\").read()", "assert data1.startswith(b\"\\x89\") assert \"covers-go\" == bucket1 assert \"here.png\" == key1", "DataSource.lookup(self._db, DataSource.UNGLUE_IT) parameters[\"data_source\"] = data_source # Act result = uploader.book_url(**parameters)", "extension if title: parameters[\"title\"] = title if data_source_name: data_source =", "), ( \"s3_virtual_hosted_style_request_with_dashed_region\", \"https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg\", (\"bucket\", \"directory/filename.jpg\"), ), ( \"s3_virtual_hosted_style_request_with_dotted_region\", \"https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg\",", "True, { MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL }, ), ( \"using_minio_uploader_and_protected_access_bucket\", MinIOUploader, S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY,", "\"with_default_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ), ( \"with_custom_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/\", \"us-east-3\", ),", "\"On Books\", ), ( \"with_custom_extension_and_title_and_data_source_and_region\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf\", \".pdf\",", "\"filename\", scaled_size=601)) # Arrange data_source = DataSource.lookup(self._db, data_source_name) identifier =", "S3Uploader(integration, MockS3Client) assert isinstance(uploader.client, MockS3Client) def test_get_bucket(self): buckets = {", "# eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', # m(unglueit, identifier, \"filename\", scaled_size=601)) # Arrange data_source", "= \"1234567890\" identifier = Identifier(type=Identifier.ISBN, identifier=book_title) representation = Representation( content=book_content,", ").value = \"a transform\" uploader = MirrorUploader.implementation(integration) assert True ==", "\"PartNumber\": 1}, {\"ETag\": \"etag\", \"PartNumber\": 2}, ], }, } ]", "for bucket in response[\"Buckets\"]: bucket_name = bucket[\"Name\"] response = self.minio_s3_client.list_objects(Bucket=bucket_name)", "used for creating a boto3 client inside S3Uploader\"\"\" @classmethod def", "== uploader.buckets # get_bucket just does a lookup in .buckets", "failure is treated as a transient error. 
uploader.client.fail_with = BotoCoreError()", "\"the këy\", \"https://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTPS, ), ] ) def test_final_mirror_url( self,", "= self._identifier(foreign_id=\"ABOOK\") # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} # uploader", "library_name, expected_result, region=None ): # Arrange uploader = self._create_s3_uploader(region=region) library", "Code=401, Message=\"Bad credentials\", ) ) uploader.client.fail_with = ClientError(response, \"SomeOperation\") uploader.mirror_one(epub_rep,", "implementation registry, and it's # better if it's the same", "S3UploaderConfiguration.URL_TEMPLATE_KEY ).value = \"a transform\" uploader = MirrorUploader.implementation(integration) assert True", "\"s3_path_style_url_with_custom_region_and_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.us-east-2.amazonaws.com/a-bucket/a-path\", \"us-east-2\", S3AddressingStyle.PATH.value, ), ( \"s3_url_with_custom_region_and_path_with_slash\", \"a-bucket\",", "\"https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", ), ( \"without_scaled_size_and_with_custom_region\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"}, DataSource.UNGLUE_IT, \"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\",", "self, name, buckets, identifier, expected_result, extension=None, data_source_name=None, title=None, region=None, open_access=True,", "[] == upload.parts assert False == upload.completed assert False ==", "Act result = uploader.cover_image_url( data_source, identifier, filename, scaled_size=scaled_size ) #", "username, password): # Arrange settings = {\"username\": username, \"password\": password}", "# Arrange uploader = self._create_s3_uploader( region=region, addressing_style=addressing_style ) # Act", "args1, ignore1], [data2, bucket2, key2, args2, ignore2], ] = s3.client.uploads", "\"Part 2\"] == upload.parts assert True == MockMultipartS3Upload.completed assert False", "it's # better if it's the same as the name", "uploader.buckets # get_bucket just does a lookup in .buckets uploader.buckets[\"foo\"]", "None, \"us-east-3\", ), ( \"with_overdrive_data_source_and_custom_region\", \"test-book-covers-s3-bucket\", DataSource.OVERDRIVE, \"https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/\", None, \"us-east-3\",", "= MirrorUploader.implementation(integration) assert True == isinstance(uploader, S3Uploader) # The URL_TEMPLATE_KEY", "self._identifier(foreign_id=identifier) uploader = self._create_s3_uploader(region=region, **buckets) # Act result = uploader.cover_image_url(", "final_mirror_url so we can verify that it's called with #", "as upload: assert [] == upload.parts assert False == upload.completed", "(password if password != \"\" else None) assert \"config\" not", "\"SHORT\", \"https://my-feed/SHORT/\"), ] ) def test_marc_file_root( self, name, bucket, library_name,", "# Assert assert result == expected_result def test_mirror_one(self): edition, pool", "key1 assert Representation.PNG_MEDIA_TYPE == args1[\"ContentType\"] assert (utc_now() - cover_rep.mirrored_at).seconds <", "object[\"Key\"] self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) self.minio_s3_client.delete_bucket(Bucket=bucket_name) def _create_s3_uploader( self, client_class=None, uploader_class=None, region=None,", "( \"implicit_s3_url_template\", \"bucket\", \"the key\", \"https://bucket.s3.amazonaws.com/the%20key\", ), ( 
\"implicit_s3_url_template_with_custom_region\", \"bucket\",", "result == expected_result @parameterized.expand( [ ( \"with_default_region\", \"test-open-access-s3-bucket\", \"https://test-open-access-s3-bucket.s3.amazonaws.com/\", ),", "1\", \"UploadId\": 1, \"PartNumber\": 1, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", },", "(\"book-covers.nypl.org\", \"directory/filename+with+spaces%21.jpg\"), False, ), ] ) def test_split_url(self, name, url,", "name, expiration_settings, expected_expiration): # Arrange region = \"us-east-1\" bucket =", "AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): raise Exception(\"Error!\") rep.mirror_exception = None # Failed", "Mock final_mirror_url so we can verify that it's called with", "# attribute on the S3Uploader object. assert \"a transform\" ==", "start_time) # Assert assert result == expected_result @parameterized.expand( [ (", "1\") upload.upload_part(\"Part 2\") assert [\"Part 1\", \"Part 2\"] == upload.parts", "\"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf\", \".pdf\", ), ( \"with_custom_data_source\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub\",", "( \"with_custom_title\", {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: \"thebooks\"}, \"ABOOK\", \"https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub\", None, None, \"On Books\",", "1, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc\", ), ( \"with_s3_bucket_and_end_time_and_start_time\", \"marc\", \"SHORT\",", "MirrorUploader implementation registry, and it's # better if it's the", "\"SHORT\", \"Lane\", datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020,", "rep.url) upload.upload_part(\"Part 1\") upload.upload_part(\"Part 2\") upload.complete() assert [ { \"Bucket\":", "== S3UploaderConfiguration.S3_DEFAULT_REGION assert aws_access_key_id == None assert aws_secret_access_key == None", "), ] ) def test_cover_image_root( self, name, bucket, data_source_name, expected_result,", "datetime_utc(2020, 1, 1, 0, 0, 0), \"us-east-2\", ), ( \"with_http_bucket_and_end_time_and_start_time\",", "<svg xmlns=\"http://www.w3.org/2000/svg\" width=\"100\" height=\"50\"> <ellipse cx=\"50\" cy=\"25\" rx=\"50\" ry=\"25\" style=\"fill:blue;\"/>", "was called with bucket %s, key %s\" % (bucket, key)", "from mock import MagicMock from parameterized import parameterized from ..mirror", "\"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, \"us-east-2\", ), ( \"http_url_template\", \"bucket\", \"the", "\"ABOOK\", \"filename\", \"https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename\", None, \"us-east-3\", ), ( \"with_scaled_size\", {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: \"thecovers\"},", "S3Uploader, host=S3UploaderIntegrationTest.SIMPLIFIED_TEST_MINIO_HOST, ), S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, \"test-bucket\", True, ), ( \"using_s3_uploader_and_protected_access_bucket\", functools.partial(", "code is not treated as a transient error -- #", "1, \"PartNumber\": 1, \"Bucket\": \"bucket\", \"Key\": \"books.mrc\", }, { \"Body\":", "self.SIMPLIFIED_TEST_MINIO_PASSWORD ) if not client_class: client_class = self.s3_client_class return 
super(S3UploaderIntegrationTest,", "Arrange uploader = self._create_s3_uploader(region=region) library = self._library(short_name=library_name) # Act result", "datetime_utc(2020, 1, 2, 0, 0, 0), \"https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc\", datetime_utc(2020, 1, 1,", "Identifier(type=Identifier.ISBN, identifier=book_title) representation = Representation( content=book_content, media_type=Representation.EPUB_MEDIA_TYPE ) buckets =", "), ( \"https_url\", \"https://book-covers.nypl.org/directory/filename.jpg\", (\"book-covers.nypl.org\", \"directory/filename.jpg\"), ), ( \"http_url_with_escaped_symbols\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\",", "\"http_url_with_escaped_symbols\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\", (\"book-covers.nypl.org\", \"directory/filename with spaces!.jpg\"), ), ( \"http_url_with_escaped_symbols_but_unquote_set_to_false\", \"http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg\",", "= \"\"\"<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.1//EN\" \"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd\"> <svg xmlns=\"http://www.w3.org/2000/svg\"", "covers-go, key here.png\" == cover_rep.mirror_url ) # mirrored-at was set", "\"\"\" if settings and \"username\" not in settings: self._add_settings_value( settings,", "successfully uploaded, # final_mirror_url was never called and mirror_url is", ") integration.username = settings.get(\"username\", \"username\") integration.password = settings.get(\"password\", \"password\") return", "# mirrored-at was set when the representation was 'mirrored' for", "= self.minio_s3_client.list_objects(Bucket=bucket_name) assert \"Contents\" in response assert len(response[\"Contents\"]) == 1", "\"https://a-bucket.s3.amazonaws.com/a-path\", None, ), ( \"s3_path_style_url_with_path_with_slash\", \"a-bucket\", \"/a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value,", "TestMultiPartS3Upload(S3UploaderTest): def _representation(self): rep, ignore = create( self._db, Representation, url=\"http://bucket/books.mrc\",", "the code used to build S3 keys from parts.\"\"\" parts", "rep = self._representation() upload = MultipartS3Upload(uploader, rep, rep.url) assert uploader", "[ { \"Body\": \"Part 1\", \"UploadId\": 1, \"PartNumber\": 1, \"Bucket\":", "== None assert config.signature_version == botocore.UNSIGNED assert ( config.s3[\"addressing_style\"] ==", "# encoding: utf-8 import functools import os from urllib.parse import", "addressing_style, **settings ) class TestS3Uploader(S3UploaderTest): def test_names(self): # The NAME", "0, 0, 0), ), ( \"with_https_bucket_and_end_time_and_start_time\", \"https://marc\", \"SHORT\", \"Lane\", datetime_utc(2020,", "( \"s3_path_style_url_with_path_without_slash\", \"a-bucket\", \"a-path\", \"https://s3.amazonaws.com/a-bucket/a-path\", None, S3AddressingStyle.PATH.value, ), ( \"s3_path_style_dummy_url_with_path_without_slash\",", "import urlsplit import boto3 import botocore import pytest from botocore.exceptions", "\"bucket\", \"the key\", \"https://bucket.s3.us-east-2.amazonaws.com/the%20key\", None, \"us-east-2\", ), ( \"explicit_s3_url_template\", \"bucket\",", "if url_transform: uploader.url_transform = url_transform # Act result = uploader.final_mirror_url(bucket,", "== botocore.UNSIGNED assert ( config.s3[\"addressing_style\"] == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE ) service_name =", "propagates through. 
uploader.client.fail_with = Exception(\"crash!\") pytest.raises(Exception, uploader.mirror_one, epub_rep, self._url) def", "== upload.filename assert 1 == upload.part_number assert [] == upload.parts", "def _integration(self, **settings): \"\"\"Create and configure a simple S3 integration.\"\"\"", "MockMultipartS3Upload.aborted assert \"Error!\" == rep.mirror_exception class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): def complete(self): raise", "assert \"covers-go\" == bucket1 assert \"here.png\" == key1 assert Representation.PNG_MEDIA_TYPE", "import os from urllib.parse import urlsplit import boto3 import botocore", "\"http://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTP, ), ( \"https_url_template\", \"bucket\", \"the këy\", \"https://bucket/the%20k%C3%ABy\", S3UploaderConfiguration.URL_TEMPLATE_HTTPS,", "uploaded, # final_mirror_url was never called and mirror_url is #", "1234, \"Die Flügelmaus+.epub\"] assert ( \"Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub\" == S3Uploader.key_join(parts) ) @parameterized.expand(", "set to the result of final_mirror_url. assert ( \"final_mirror_url was" ]
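The test double at the heart of this suite is worth calling out: rather than stubbing boto3 at the network layer, a fake client records every upload and exposes a fail_with attribute that injects the next error. Below is a minimal self-contained sketch of the same pattern; FakeS3Client and mirror_document are illustrative names for this sketch, not part of the module above.

import io


class FakeS3Client(object):
    """Records uploads; raises self.fail_with once if it is set."""

    def __init__(self):
        self.uploads = []
        self.fail_with = None

    def upload_fileobj(self, fileobj, bucket, key, ExtraArgs=None):
        if self.fail_with:
            error, self.fail_with = self.fail_with, None
            raise error  # simulate a boto3 failure on the next call
        self.uploads.append((fileobj.read(), bucket, key, ExtraArgs))


def mirror_document(client, data, bucket, key, content_type):
    """Toy stand-in for S3Uploader.mirror_one: push bytes to a bucket."""
    client.upload_fileobj(
        io.BytesIO(data), bucket, key, ExtraArgs={"ContentType": content_type}
    )


def test_mirror_document_records_upload():
    client = FakeS3Client()
    mirror_document(client, b"i'm an epub", "books-go", "here.epub", "application/epub+zip")
    [(data, bucket, key, args)] = client.uploads
    assert data == b"i'm an epub"
    assert (bucket, key) == ("books-go", "here.epub")
    assert args["ContentType"] == "application/epub+zip"

Because the fake keeps the recorded uploads in a plain list, assertions can destructure it exactly the way the tests above destructure s3.client.uploads.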
[ "travis_tag): return 'release' return 'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))", "build version to be 'qa', 'rc', 'release'\"\"\" import sys import", "build type=%s, build commit=%s\", build_type, travis_commit) with open(build_type_path, 'w') as", "= os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build type file: %s\", build_type_path)", "with open(build_type_path, 'w') as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\")", "logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag: return \"qa\"", "travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return 'qa'", "'lbry', 'build_type.py') log.debug(\"configuring build type file: %s\", build_type_path) travis_commit =", "version to be 'qa', 'rc', 'release'\"\"\" import sys import os", "to be 'qa', 'rc', 'release'\"\"\" import sys import os import", "commit=%s\", build_type, travis_commit) with open(build_type_path, 'w') as f: f.write(f\"BUILD =", "get_build_type(travis_tag=None): if not travis_tag: return \"qa\" log.debug(\"getting build type for", "log.debug(\"getting build type for tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag):", "log.debug(\"configuring build type file: %s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type", "travis_tag: return \"qa\" log.debug(\"getting build type for tag: \\\"%s\\\"\", travis_tag)", "get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s, build commit=%s\", build_type, travis_commit) with", "'release'\"\"\" import sys import os import re import logging log", "for tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif", "as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\") if __name__ ==", "f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\") if __name__ == '__main__': sys.exit(main())", "log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag: return \"qa\" log.debug(\"getting build", "def get_build_type(travis_tag=None): if not travis_tag: return \"qa\" log.debug(\"getting build type", "= os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s, build", "import sys import os import re import logging log =", "log.debug(\"setting build type=%s, build commit=%s\", build_type, travis_commit) with open(build_type_path, 'w')", "os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s, build commit=%s\",", "= os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build type", "build commit=%s\", build_type, travis_commit) with open(build_type_path, 'w') as f: f.write(f\"BUILD", "build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build", "import logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if", "if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', 
travis_tag): return 'release'", "%s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting", "travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return", "re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return", "\\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag):", "travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s,", "'qa', 'rc', 'release'\"\"\" import sys import os import re import", "type for tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc'", "type file: %s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG',", "build_type = get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s, build commit=%s\", build_type,", "return 'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir,", "build_type, travis_commit) with open(build_type_path, 'w') as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT", "open(build_type_path, 'w') as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\") if", "os import re import logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG)", "= get_build_type(os.environ.get('TRAVIS_TAG', None)) log.debug(\"setting build type=%s, build commit=%s\", build_type, travis_commit)", "be 'qa', 'rc', 'release'\"\"\" import sys import os import re", "log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag:", "return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return 'qa' def", "tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return 'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$',", "= logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag: return", "log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not travis_tag: return \"qa\" log.debug(\"getting", "def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py')", "file: %s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type = get_build_type(os.environ.get('TRAVIS_TAG', None))", "elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return 'qa' def main(): root_dir", "'rc' elif re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return 'qa' def main():", "'rc', 'release'\"\"\" import sys import os import re import logging", "\"\"\"Set the build version to be 'qa', 'rc', 'release'\"\"\" import", "'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry',", "build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build type file: 
%s\",", "os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build type file:", "the build version to be 'qa', 'rc', 'release'\"\"\" import sys", "re import logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None):", "re.match(r'v\\d+\\.\\d+\\.\\d+$', travis_tag): return 'release' return 'qa' def main(): root_dir =", "main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring", "build type file: %s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6] build_type =", "not travis_tag: return \"qa\" log.debug(\"getting build type for tag: \\\"%s\\\"\",", "import re import logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def", "None)) log.debug(\"setting build type=%s, build commit=%s\", build_type, travis_commit) with open(build_type_path,", "os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build type file: %s\", build_type_path) travis_commit", "travis_commit) with open(build_type_path, 'w') as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT =", "'build_type.py') log.debug(\"configuring build type file: %s\", build_type_path) travis_commit = os.environ['TRAVIS_COMMIT'][:6]", "f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\") if __name__ == '__main__':", "return \"qa\" log.debug(\"getting build type for tag: \\\"%s\\\"\", travis_tag) if", "sys import os import re import logging log = logging.getLogger()", "'release' return 'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path =", "'w') as f: f.write(f\"BUILD = \\\"{build_type}\\\"\\nBUILD_COMMIT = \\\"{travis_commit}\\\"\\n\") if __name__", "root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path = os.path.join(root_dir, 'lbry', 'build_type.py') log.debug(\"configuring build", "if not travis_tag: return \"qa\" log.debug(\"getting build type for tag:", "build type for tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$', travis_tag): return", "\"qa\" log.debug(\"getting build type for tag: \\\"%s\\\"\", travis_tag) if re.match(r'v\\d+\\.\\d+\\.\\d+rc\\d+$',", "return 'release' return 'qa' def main(): root_dir = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) build_type_path", "import os import re import logging log = logging.getLogger() log.addHandler(logging.StreamHandler())", "logging log = logging.getLogger() log.addHandler(logging.StreamHandler()) log.setLevel(logging.DEBUG) def get_build_type(travis_tag=None): if not", "type=%s, build commit=%s\", build_type, travis_commit) with open(build_type_path, 'w') as f:" ]
[ "print orderer_list print 'peer_list: ' print peer_list print 'allAnchor_list' print", "peer_list print 'allAnchor_list' print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0]", "chaincode computation load as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type", "=[] # Load connection profile for orgName in os.listdir(work_dir +", "running duration : 0 hour 10 minutes 0 second chaincode_invoke[\"iterationCount\"]", "tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj)", "yaml import os import ast import sys from collections import", "= tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if network_type ==", "is to create as much chaincode computation load as possible\"", "org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in join_list: join_item", ")][\"peers\"])[0] # When there is only peer or orderer, we", "or orderer, we skip tests. if len(orderer_list) == 0 or", "connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName]", "= str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with", "exit(1) # Load template file with open(curr_dir + \"/templates/testplan_template.yml\", 'r')", "testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"]", "{} org_anchor_dict ={} allAnchor_list =[] # Load connection profile for", "[] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) != True: print 'certs keyfiles", "directory do not exist' exit(1) # Load template file with", "sys from collections import OrderedDict curr_dir = os.getcwd() work_dir =", "'/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list))", "= ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE", "str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with default", "chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command", "sys.argv[2] testplan_dict = {} testplan_dict[\"name\"] = \"System performance test\" testplan_dict[\"description\"]", "= [] orderer_list = [] peer_list = [] org_peers_dict =", "org_list[0] 
chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) # Invoke with", "with open(work_dir + '/keyfiles/' + orgName + '/connection.yml', 'r') as", "testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: ' print org_list_lowercase print 'orderer_list: '", "org in org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"]", "== 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"]", "channel_join = template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke", "chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile", "{} org_list = [] org_list_lowercase = [] orderer_list = []", "# CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE", "much chaincode computation load as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if", "None: continue orderer_list = orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] !=", "= ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: '", "'/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/' + orgName + '/connection.yml'): with", "print 'certs keyfiles directory do not exist' exit(1) # Load", "# Load template file with open(curr_dir + \"/templates/testplan_template.yml\", 'r') as", "# Load connection profile for orgName in os.listdir(work_dir + '/keyfiles'):", "org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list +", "orgName + '/connection.yml', 'r') as stream: connectionProfile = yaml.load(stream) if", "allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: ' print org_list_lowercase print", "org_list[0] if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"]", "testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: ' print org_list_lowercase print 'orderer_list:", ": 0 hour 10 minutes 0 second. 
# And enable", "template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"]", "network_type = sys.argv[2] testplan_dict = {} testplan_dict[\"name\"] = \"System performance", "with fixed running duration : 0 hour 10 minutes 0", "else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0]", "= template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list =", "exist' exit(1) # Load template file with open(curr_dir + \"/templates/testplan_template.yml\",", "as stream: template = yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] # channel_join", "= ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) # Invoke with fixed running", "= \"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] =", "command with customized image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\"", "= tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan file outputfile =open(work_dir +", "chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list", "orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE join_list = [] install_list", "in join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in install_list:", "org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list + \\ connectionProfile[\"organizations\"][orgName.lower(", "template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate =", "# Execute command with customized image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"]", "org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\")", "connectionProfile[\"orderers\"] is None: continue orderer_list = orderer_list + connectionProfile[\"orderers\"].keys() if", "peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"]", "+ '/connection.yml'): with open(work_dir + '/keyfiles/' + orgName + '/connection.yml',", "is None: continue orderer_list = orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"]", "= str(chaincode_invoke) # Invoke with fixed running duration : 0", "True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) != True:", "= [] install_list = [] for org in org_list: channel_join", "org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE # Invoke with fixed", "[] install_list = [] for org in org_list: channel_join =", "list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName in org_list : tempOrgAnchorObj={}", "== \"ibp\": tempstr = 
tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan file", "os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/' + orgName +", "if network_type == \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False", "CHAINCODE_INVOKE # Invoke with fixed transaction count : 100 chaincode_invoke[\"iterationCount\"]", "org_list_lowercase print 'orderer_list: ' print orderer_list print 'peer_list: ' print", "orgName in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={}", "'/connection.yml', 'r') as stream: connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"] is", "= sys.argv[1] network_type = sys.argv[2] testplan_dict = {} testplan_dict[\"name\"] =", "org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org])", "channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE join_list =", "'r') as stream: template = yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] #", "in install_list: install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] =", "= template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list = [] org_list_lowercase =", "testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: ' print org_list_lowercase", "customized image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr=", "= '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time = str(chaincode_invoke) # Invoke", "testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type == \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3]", "== \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] =", "= {} org_list = [] org_list_lowercase = [] orderer_list =", "allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type == 'cello':", "+ orgName + '/connection.yml'): with open(work_dir + '/keyfiles/' + orgName", "0 second. # And enable running tests parallel by setting", "= False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[]", "JOIN_CHANNEL and INSTALL_CHAINCODE join_list = [] install_list = [] for", "connection profile for orgName in os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir", "peer or orderer, we skip tests. 
if len(orderer_list) == 0", "= peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) #", "= [] peer_list = [] org_peers_dict = {} org_anchor_dict ={}", "images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with customized image execute_command[\"name\"] =", "channel_create = template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"]", "orgName + '/connection.yml'): with open(work_dir + '/keyfiles/' + orgName +", "\\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When", "\"System performance test\" testplan_dict[\"description\"] = \"This test is to create", "= os.getcwd() work_dir = sys.argv[1] network_type = sys.argv[2] testplan_dict =", "testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) #", "\"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName in org_list", "testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE join_list = [] install_list =", "False chaincoode_invoke_time = str(chaincode_invoke) # Invoke with fixed running duration", "image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False)", "install_list.append(str(chaincode_install)) for join_org in join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for", "= orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE join_list = []", "install_list = [] for org in org_list: channel_join = template[\"CHANNEL_JOIN\"]", "channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join))", "skip tests. 
if len(orderer_list) == 0 or len(peer_list) == 0:", "&allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if", "outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName", "= [] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) != True: print 'certs", "network_type == \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"]", "[] orderer_list = [] peer_list = [] org_peers_dict = {}", "with fixed transaction count : 100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"]", "= \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName in", "join_list = [] install_list = [] for org in org_list:", "org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list + \\", "curr_dir = os.getcwd() work_dir = sys.argv[1] network_type = sys.argv[2] testplan_dict", ")][\"peers\"] peer_list = peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] =", "# Dump testplan file outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(tempstr) outputfile.close()", "testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"]", "= org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE # Invoke with", "sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] = False", "channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium'", "!= True: print 'certs keyfiles directory do not exist' exit(1)", "second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke)", "= '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count =", "+ orgName + '/connection.yml', 'r') as stream: connectionProfile = yaml.load(stream)", "connectionProfile = {} org_list = [] org_list_lowercase = [] orderer_list", "chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE # Invoke with fixed transaction", "testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with customized image execute_command[\"name\"] = \"execute-command-with-customized-image\"", "for org in org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org", "from collections import OrderedDict curr_dir = os.getcwd() work_dir = sys.argv[1]", "os.path.isfile(work_dir + '/keyfiles/' + orgName + '/connection.yml'): with open(work_dir +", "testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) 
testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command)))", "testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"]", "= ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in join_list: join_item = ast.literal_eval(join_org)", "as stream: connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"] is None: continue", "orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName]", "orgName in os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/' +", "with customized image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command)))", "# When there is only peer or orderer, we skip", "={} allAnchor_list =[] # Load connection profile for orgName in", "for orgName in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj)", "keyfiles directory do not exist' exit(1) # Load template file", "= yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"] chaincode_install", "&allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if network_type == \"ibp\": tempstr", "connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there is only peer or orderer,", "peer_list = peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted(", "tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan file outputfile =open(work_dir + '/testplan_example.yml','w')", "there is only peer or orderer, we skip tests. 
if", "import yaml import os import ast import sys from collections", "org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there is only", "0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel =", "for orgName in org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr", "orderer_list = orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName)", "'orderer_list: ' print orderer_list print 'peer_list: ' print peer_list print", "import sys from collections import OrderedDict curr_dir = os.getcwd() work_dir", "channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase)", "network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium'", "if os.path.exists(work_dir) != True: print 'certs keyfiles directory do not", "sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there is only peer or", "print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type ==", "import ast import sys from collections import OrderedDict curr_dir =", "&runid\") if network_type == \"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\") #", "&networkid\") # Dump testplan file outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(tempstr)", "testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) #", "tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list:", "\"This test is to create as much chaincode computation load", "'allAnchor_list' print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type", "tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr =", "= template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate", "orderer_list print 'peer_list: ' print peer_list print 'allAnchor_list' print allAnchor_list", "tests. 
if len(orderer_list) == 0 or len(peer_list) == 0: outputfile", "False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[] if", "\"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] = \"default\"", "if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] =", "'100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke)", "','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) # Invoke with fixed running duration", "= template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke =", "100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list)", "fixed transaction count : 100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] =", "second. # And enable running tests parallel by setting waitUntilFinish", "= tempstr.replace(\"runid:\",\"runid: &runid\") if network_type == \"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid:", "yaml.load(stream) if connectionProfile[\"orderers\"] is None: continue orderer_list = orderer_list +", "in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"]", "' print orderer_list print 'peer_list: ' print peer_list print 'allAnchor_list'", "# CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for", "true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time = str(chaincode_invoke)", "print 'allAnchor_list' print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if", "= \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr", "orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName in org_list", "install_list: install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0]", "tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr =", "= 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"]", "[] org_peers_dict = {} org_anchor_dict ={} allAnchor_list =[] # Load", "install_org in install_list: install_item = ast.literal_eval(install_org) 
testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"]", "testplan_dict = {} testplan_dict[\"name\"] = \"System performance test\" testplan_dict[\"description\"] =", "stream: connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"] is None: continue orderer_list", "allAnchor_list =[] # Load connection profile for orgName in os.listdir(work_dir", "duration : 0 hour 10 minutes 0 second. # And", "= sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there is only peer", "0: outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list =", "!= None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list =", "# JOIN_CHANNEL and INSTALL_CHAINCODE join_list = [] install_list = []", "tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org", "join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install))", "'/keyfiles/' + orgName + '/connection.yml'): with open(work_dir + '/keyfiles/' +", "= yaml.load(stream) if connectionProfile[\"orderers\"] is None: continue orderer_list = orderer_list", "testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print", "= org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in join_list:", "= ','.join(peer_list) # CHAINCODE_INVOKE # Invoke with fixed transaction count", "testplan_dict[\"description\"] = \"This test is to create as much chaincode", "{} testplan_dict[\"name\"] = \"System performance test\" testplan_dict[\"description\"] = \"This test", "list(OrderedDict.fromkeys(peer_list)) for orgName in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName]", "orgName in org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr =", "= tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers:", "True: print 'certs keyfiles directory do not exist' exit(1) #", "test is to create as much chaincode computation load as", "channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and", "Load connection profile for orgName in os.listdir(work_dir + '/keyfiles'): if", "test\" 
testplan_dict[\"description\"] = \"This test is to create as much", "org_anchor_dict ={} allAnchor_list =[] # Load connection profile for orgName", "minutes 0 second. # And enable running tests parallel by", "in org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers:", "join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in install_list: install_item", "default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with customized image execute_command[\"name\"]", "' print peer_list print 'allAnchor_list' print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"]", "# And enable running tests parallel by setting waitUntilFinish to", "to create as much chaincode computation load as possible\" testplan_dict[\"runid\"]", "print 'orderer_list: ' print orderer_list print 'peer_list: ' print peer_list", "'r') as stream: connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"] is None:", "= False chaincoode_invoke_time = str(chaincode_invoke) # Invoke with fixed running", "chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) # Invoke with fixed", "= [] for org in org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"]", "chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command", "for orgName in os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/'", ": tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr", "is only peer or orderer, we skip tests. 
if len(orderer_list)", "load as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type == \"ibp\":", "template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"]", "\"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] = []", "print 'org list: ' print org_list_lowercase print 'orderer_list: ' print", "join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in install_list: install_item =", "as much chaincode computation load as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\"", "setting waitUntilFinish to true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False", "chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) #", "Invoke with fixed running duration : 0 hour 10 minutes", "= list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName in org_list :", "join_org in join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in", "connYamlStr for orgName in org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\")", "'certs keyfiles directory do not exist' exit(1) # Load template", "tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if network_type == \"ibp\":", "' print org_list_lowercase print 'orderer_list: ' print orderer_list print 'peer_list:", "str(chaincode_invoke) # Invoke with fixed running duration : 0 hour", "hour 10 minutes 0 second. 
# And enable running tests", "= template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command =", "= {} testplan_dict[\"name\"] = \"System performance test\" testplan_dict[\"description\"] = \"This", "install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"]", "= False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] =", "by setting waitUntilFinish to true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] =", "for install_org in install_list: install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE", "chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel))", "file with open(curr_dir + \"/templates/testplan_template.yml\", 'r') as stream: template =", "= org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) #", "and INSTALL_CHAINCODE join_list = [] install_list = [] for org", "[] peer_list = [] org_peers_dict = {} org_anchor_dict ={} allAnchor_list", "== 0: outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list", "Invoke with fixed transaction count : 100 chaincode_invoke[\"iterationCount\"] = '100'", "[] org_list_lowercase = [] orderer_list = [] peer_list = []", "collections import OrderedDict curr_dir = os.getcwd() work_dir = sys.argv[1] network_type", "(connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list", "= peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower(", "outputfile.write(\"\") outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for", "testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"]", "performance test\" testplan_dict[\"description\"] = \"This test is to create as", "template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile = {}", "OrderedDict curr_dir = os.getcwd() work_dir = sys.argv[1] network_type = sys.argv[2]", "= connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"]", ": 100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] =", 
"channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"]", "running duration : 0 hour 10 minutes 0 second. #", "chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time = str(chaincode_invoke) # Invoke with fixed", "enable running tests parallel by setting waitUntilFinish to true chaincode_invoke[\"iterationCount\"]", "'0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time = str(chaincode_invoke) # Invoke with", "tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName])", "= ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] =", "\"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan file outputfile", "&\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr", "duration : 0 hour 10 minutes 0 second chaincode_invoke[\"iterationCount\"] =", "+ '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list =", "= template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] =", "testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName in org_list :", "# channel_join = template[\"CHANNEL_JOIN\"] chaincode_install = template[\"CHAINCODE_INSTALL\"] chaincode_instantiate = template[\"CHAINCODE_INSTANTIATE\"]", "waitUntilFinish to true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time", "if len(orderer_list) == 0 or len(peer_list) == 0: outputfile =open(work_dir", "chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count", "to true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time =", "= template[\"CHAINCODE_INSTANTIATE\"] chaincode_invoke = template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile =", "orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org])", ")][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there is", "chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"] = False chaincoode_invoke_time = str(chaincode_invoke) #", "= '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = 
peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count))", "# Invoke with fixed transaction count : 100 chaincode_invoke[\"iterationCount\"] =", "','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)}) print 'org list: ' print", "= ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in install_list: install_item = ast.literal_eval(install_org)", "network_type == \"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan", "continue orderer_list = orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None):", "channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"]", "And enable running tests parallel by setting waitUntilFinish to true", "with open(curr_dir + \"/templates/testplan_template.yml\", 'r') as stream: template = yaml.load(stream)", "Load template file with open(curr_dir + \"/templates/testplan_template.yml\", 'r') as stream:", "= [] org_peers_dict = {} org_anchor_dict ={} allAnchor_list =[] #", "chaincoode_invoke_count = str(chaincode_invoke) # Invoke with fixed running duration :", "'0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time))", "testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with default images", "orderer_list = [] peer_list = [] org_peers_dict = {} org_anchor_dict", "tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if network_type", "as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type == \"ibp\": testplan_dict[\"networkid\"]", "INSTALL_CHAINCODE join_list = [] install_list = [] for org in", "for join_org in join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org", "','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL and INSTALL_CHAINCODE join_list", "','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in join_list: join_item = ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item)", "Execute command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with", "# Invoke with fixed running duration : 0 hour 10", "import os import ast import sys from collections import OrderedDict", "tempstr = 
tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr =", "= sys.argv[3] testplan_dict[\"collectFabricMetrics\"] = False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] =", "connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"] is None: continue orderer_list =", "'/keyfiles/' + orgName + '/connection.yml', 'r') as stream: connectionProfile =", "sys.argv[1] network_type = sys.argv[2] testplan_dict = {} testplan_dict[\"name\"] = \"System", "= True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) !=", "= org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)})", "yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"] chaincode_install =", "peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_count)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_time)) testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute", "== 0 or len(peer_list) == 0: outputfile =open(work_dir + '/testplan_example.yml','w')", "running tests parallel by setting waitUntilFinish to true chaincode_invoke[\"iterationCount\"] =", "[] for org in org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] =", "','.join(peer_list) # CHAINCODE_INVOKE # Invoke with fixed transaction count :", "tempstr = tempstr.replace(\"runid:\",\"runid: &runid\") if network_type == \"ibp\": tempstr =", "'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] =", "if connectionProfile[\"orderers\"] is None: continue orderer_list = orderer_list + connectionProfile[\"orderers\"].keys()", "= org_list[0] chaincode_invoke[\"parameters\"][\"peers\"] = ','.join(peer_list) chaincoode_invoke_count = str(chaincode_invoke) # Invoke", "= orderer_list + connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower())", "# CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"]", "import OrderedDict curr_dir = os.getcwd() work_dir = sys.argv[1] network_type =", "transaction count : 100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0]", "ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list)", "execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= 
yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName", "+ '/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/' + orgName + '/connection.yml'):", "= \"System performance test\" testplan_dict[\"description\"] = \"This test is to", "=open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list", "channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create)", "in org_list: channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] =", "or len(peer_list) == 0: outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close()", "= tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors:", "= \"RUNID_HERE\" if network_type == \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"]", "+ connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] =", "print org_list_lowercase print 'orderer_list: ' print orderer_list print 'peer_list: '", "chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in join_list: join_item =", "CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] = org_list[0] if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] =", "\"RUNID_HERE\" if network_type == \"ibp\": testplan_dict[\"networkid\"] = sys.argv[3] testplan_dict[\"collectFabricMetrics\"] =", "connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName in org_list : tempstr", "#!/usr/bin/python import yaml import os import ast import sys from", "len(orderer_list) == 0 or len(peer_list) == 0: outputfile =open(work_dir +", "with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with customized image", "create as much chaincode computation load as possible\" testplan_dict[\"runid\"] =", "execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr=", "computation load as possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type ==", "list: ' print org_list_lowercase print 'orderer_list: ' print orderer_list print", "=[] if os.path.exists(work_dir) != True: print 'certs keyfiles directory do", "org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL", "None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] peer_list = peer_list", "= 
list(OrderedDict.fromkeys(peer_list)) for orgName in org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] =", "'org list: ' print org_list_lowercase print 'orderer_list: ' print orderer_list", "execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list = [] org_list_lowercase", "tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\")", "'FabricConsortium' else: channel_create[\"parameters\"][\"channelConsortium\"] = 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] =", "0 hour 10 minutes 0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"]", "tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump testplan file outputfile =open(work_dir", "tempstr= connYamlStr for orgName in org_list : tempstr = tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor:", "+ '/keyfiles/' + orgName + '/connection.yml'): with open(work_dir + '/keyfiles/'", "ast import sys from collections import OrderedDict curr_dir = os.getcwd()", "= {} org_anchor_dict ={} allAnchor_list =[] # Load connection profile", "count : 100 chaincode_invoke[\"iterationCount\"] = '100' chaincode_invoke[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_invoke[\"parameters\"][\"peers\"]", "possible\" testplan_dict[\"runid\"] = \"RUNID_HERE\" if network_type == \"ibp\": testplan_dict[\"networkid\"] =", "= 'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) #", "stream: template = yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] # channel_join =", "= sys.argv[2] testplan_dict = {} testplan_dict[\"name\"] = \"System performance test\"", "org_list : tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] =", "org_list_lowercase = [] orderer_list = [] peer_list = [] org_peers_dict", "orderer, we skip tests. 
if len(orderer_list) == 0 or len(peer_list)", "org_peers_dict = {} org_anchor_dict ={} allAnchor_list =[] # Load connection", "if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"]", "os import ast import sys from collections import OrderedDict curr_dir", "<gh_stars>1-10 #!/usr/bin/python import yaml import os import ast import sys", "print 'peer_list: ' print peer_list print 'allAnchor_list' print allAnchor_list #", "= ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) # CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] =", ": tempOrgAnchorObj={} tempOrgAnchorObj[orgName+\"Anchor\"] = org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName])", "+ '/keyfiles/' + orgName + '/connection.yml', 'r') as stream: connectionProfile", "chaincoode_invoke_time = str(chaincode_invoke) # Invoke with fixed running duration :", "'SampleConsortium' channel_create[\"parameters\"][\"channelOrgs\"] = ','.join(org_list_lowercase) channel_create[\"parameters\"][\"ordererName\"] = orderer_list[0] testplan_dict[\"tests\"].append(channel_create) # JOIN_CHANNEL", "channel_join = template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"]", "+ \"/templates/testplan_template.yml\", 'r') as stream: template = yaml.load(stream) channel_create =", "CHAINCODE_INSTANTIATE chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE #", "template[\"CHAINCODE_INVOKE\"] execute_command = template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list = []", "testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) != True: print 'certs keyfiles directory", "hour 10 minutes 0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] =", "fixed running duration : 0 hour 10 minutes 0 second.", "template = yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"] # channel_join = template[\"CHANNEL_JOIN\"]", "= [] org_list_lowercase = [] orderer_list = [] peer_list =", "outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list))", "open(curr_dir + \"/templates/testplan_template.yml\", 'r') as stream: template = yaml.load(stream) channel_create", "testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir) != True: print", ": 0 hour 10 minutes 0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s'", "= orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] =", "command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command with customized", "+ '/connection.yml', 'r') as stream: connectionProfile = yaml.load(stream) if connectionProfile[\"orderers\"]", "tests parallel by setting waitUntilFinish to true 
chaincode_invoke[\"iterationCount\"] = '0h10m0s'", "tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid: &runid\")", "&\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr", "peer_list + \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0]", "os.path.exists(work_dir) != True: print 'certs keyfiles directory do not exist'", "0 hour 10 minutes 0 second. # And enable running", "'peer_list: ' print peer_list print 'allAnchor_list' print allAnchor_list # CREATE_CHANNEL", "template[\"CHANNEL_JOIN\"] channel_join[\"parameters\"][\"connectionProfile\"] = org channel_join[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0]", "exit(0) orderer_list = list(OrderedDict.fromkeys(orderer_list)) peer_list = list(OrderedDict.fromkeys(peer_list)) for orgName in", "tempstr.replace(\"runid:\",\"runid: &runid\") if network_type == \"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\")", "ast.literal_eval(join_org) testplan_dict[\"tests\"].append(join_item) for install_org in install_list: install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item)", "\"/templates/testplan_template.yml\", 'r') as stream: template = yaml.load(stream) channel_create = template[\"CHANNEL_CREATE\"]", "work_dir = sys.argv[1] network_type = sys.argv[2] testplan_dict = {} testplan_dict[\"name\"]", "testplan_dict[\"name\"] = \"System performance test\" testplan_dict[\"description\"] = \"This test is", "connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] # When there", "+ \\ connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"] org_anchor_dict[orgName] = sorted( connectionProfile[\"organizations\"][orgName.lower( )][\"peers\"])[0] #", "print peer_list print 'allAnchor_list' print allAnchor_list # CREATE_CHANNEL channel_create[\"parameters\"][\"connectionProfile\"] =", "10 minutes 0 second. 
# And enable running tests parallel", "yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for orgName in org_list : tempstr =", "os.getcwd() work_dir = sys.argv[1] network_type = sys.argv[2] testplan_dict = {}", "False testplan_dict[\"storageclass\"] = \"default\" testplan_dict[\"saveLog\"] = False testplan_dict[\"continueAfterFail\"] = True", "'/connection.yml'): with open(work_dir + '/keyfiles/' + orgName + '/connection.yml', 'r')", "template[\"EXECUTE_COMMAND\"] connectionProfile = {} org_list = [] org_list_lowercase = []", "testplan_dict[\"continueAfterFail\"] = True testplan_dict[\"tests\"] = [] testplan_dict[\"peernodeAlias\"] =[] if os.path.exists(work_dir)", "peer_list = [] org_peers_dict = {} org_anchor_dict ={} allAnchor_list =[]", "CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org", "= tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\") tempstr = tempstr.replace(\"allPeers:\",\"allPeers: &allPeers\") tempstr = tempstr.replace(\"runid:\",\"runid:", "only peer or orderer, we skip tests. if len(orderer_list) ==", "if network_type == \"ibp\": tempstr = tempstr.replace(\"networkid:\",\"networkid: &networkid\") # Dump", "chaincode_instantiate[\"parameters\"][\"connectionProfile\"] = org_list[0] chaincode_instantiate[\"parameters\"][\"peers\"] = ','.join(peer_list) # CHAINCODE_INVOKE # Invoke", "open(work_dir + '/keyfiles/' + orgName + '/connection.yml', 'r') as stream:", "parallel by setting waitUntilFinish to true chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"waitUntilFinish\"]", "do not exist' exit(1) # Load template file with open(curr_dir", "in os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir + '/keyfiles/' + orgName", "0 or len(peer_list) == 0: outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\")", "Execute command with customized image execute_command[\"name\"] = \"execute-command-with-customized-image\" execute_command[\"container\"] =", "10 minutes 0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0]", "minutes 0 second chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel", "profile for orgName in os.listdir(work_dir + '/keyfiles'): if os.path.isfile(work_dir +", "we skip tests. 
if len(orderer_list) == 0 or len(peer_list) ==", "chaincode_install[\"parameters\"][\"connectionProfile\"] = org chaincode_install[\"parameters\"][\"peers\"] = ','.join(org_peers_dict[org]) install_list.append(str(chaincode_install)) for join_org in", "if os.path.isfile(work_dir + '/keyfiles/' + orgName + '/connection.yml'): with open(work_dir", "connectionProfile[\"orderers\"].keys() if (connectionProfile[\"organizations\"][orgName.lower()][\"peers\"] != None): org_list.append(orgName) org_list_lowercase.append(orgName.lower()) org_peers_dict[orgName] = connectionProfile[\"organizations\"][orgName.lower(", "org_anchor_dict[orgName] testplan_dict[\"peernodeAlias\"].append(tempOrgAnchorObj) tempOrgPeersObj={} tempOrgPeersObj[orgName+\"Peers\"] = ','.join(org_peers_dict[orgName]) testplan_dict[\"peernodeAlias\"].append(tempOrgPeersObj) allAnchor_list.append(org_anchor_dict[orgName]) testplan_dict[\"peernodeAlias\"].append({\"allAnchors\":','.join(allAnchor_list)}) testplan_dict[\"peernodeAlias\"].append({\"allPeers\":','.join(peer_list)})", "template file with open(curr_dir + \"/templates/testplan_template.yml\", 'r') as stream: template", "fixed running duration : 0 hour 10 minutes 0 second", "= org_list[0] if network_type == 'cello': channel_create[\"parameters\"][\"channelConsortium\"] = 'FabricConsortium' else:", "# Execute command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute command", "not exist' exit(1) # Load template file with open(curr_dir +", "# CHAINCODE_INVOKE # Invoke with fixed transaction count : 100", "org_list = [] org_list_lowercase = [] orderer_list = [] peer_list", "','.join(org_peers_dict[org]) channel_join[\"parameters\"][\"ordererName\"] = orderer_list[0] join_list.append(str(channel_join)) # CHAINCODE_INSTALL chaincode_install[\"parameters\"][\"connectionProfile\"] = org", "When there is only peer or orderer, we skip tests.", "len(peer_list) == 0: outputfile =open(work_dir + '/testplan_example.yml','w') outputfile.write(\"\") outputfile.close() exit(0)", "\"execute-command-with-customized-image\" execute_command[\"container\"] = \"user/ownimage\" testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) connYamlStr= yaml.dump(testplan_dict,default_flow_style=False) tempstr= connYamlStr for", "chaincode_invoke[\"iterationCount\"] = '0h10m0s' chaincode_invoke[\"parameters\"][\"peers\"] = peer_list[0] chaincoode_invoke_parallel = str(chaincode_invoke) testplan_dict[\"tests\"].append(chaincode_instantiate)", "testplan_dict[\"tests\"].append(ast.literal_eval(chaincoode_invoke_parallel)) # Execute command with default images testplan_dict[\"tests\"].append(ast.literal_eval(str(execute_command))) # Execute", "testplan_dict[\"tests\"].append(join_item) for install_org in install_list: install_item = ast.literal_eval(install_org) testplan_dict[\"tests\"].append(install_item) #", "tempstr.replace(orgName+\"Anchor:\",orgName+\"Anchor: &\"+orgName+\"Anchor\") tempstr = tempstr.replace(orgName+\"Peers:\",orgName+\"Peers: &\"+orgName+\"Peers\") tempstr = tempstr.replace(\"allAnchors:\",\"allAnchors: &allAnchors\")", "= \"This test is to create as much chaincode computation" ]
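
# Example invocation (illustrative only; the script file name and the
# directory layout below are assumptions, not taken from the source):
#
#   python3 generate_testplan.py ./mynetwork cello
#   python3 generate_testplan.py ./mynetwork ibp my-network-id
#
# where ./mynetwork/keyfiles/<Org>/connection.yml holds each
# organization's connection profile, and the resulting plan is written
# to ./mynetwork/testplan_example.yml.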
[ "len(db.findings) == 1 db.add(f1) assert f1 in db.findings assert len(db.findings)", "db.findings assert len(db.findings) == 2 db.add(f2) assert f2 in db.findings", "assert f0 in db.findings assert len(db.findings) == 1 db.add(f1) assert", "assert len(db.findings) == 2 # test exceptions with pytest.raises(TypeError): db.add(1)", "assert len(db.findings) == 2 db.add(f2) assert f2 in db.findings assert", "dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is missing.') f1 =", "off', 'my_PC', 'The monitor is switched off.') def test_add_remove(): \"\"\"Test", "f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.') f2", "len(db.findings) == 2 db.add(f2) assert f2 in db.findings assert len(db.findings)", "'my_PC', 'The monitor is switched off.') def test_add_remove(): \"\"\"Test function.\"\"\"", "'The CD drive is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The", "= dblib.lib.BackyardDB() # regular cases db.add(f0) assert f0 in db.findings", "test exceptions with pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test function.\"\"\" db", "len(db.findings) == 3 db.add(None) assert len(db.findings) == 3 db.remove(f1) assert", "in db.findings assert len(db.findings) == 3 db.add(None) assert len(db.findings) ==", "power cord is unplugged.') f2 = dblib.lib.Finding('Monitor switched off', 'my_PC',", "switched off.') def test_add_remove(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() #", "function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert f2", "monitor is switched off.') def test_add_remove(): \"\"\"Test function.\"\"\" db =", "dblib.lib.BackyardDB() # regular cases db.add(f0) assert f0 in db.findings assert", "db.add(f0) db.add(f1) db.update(f1, f2) assert f2 in db.findings assert len(db.findings)", "off.') def test_add_remove(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() # regular", "not in db.findings assert len(db.findings) == 2 # test exceptions", "cord is unplugged.') f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The", "spook', 'my_PC', 'The CD drive is missing.') f1 = dblib.lib.Finding('Unplugged',", "with pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB()", "in db.findings assert len(db.findings) == 2 db.add(f2) assert f2 in", "== 2 db.add(f2) assert f2 in db.findings assert len(db.findings) ==", "# regular cases db.add(f0) assert f0 in db.findings assert len(db.findings)", "unplugged.') f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is", "f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is switched", "2 db.add(f2) assert f2 in db.findings assert len(db.findings) == 3", "dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is", "= dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.') f2 =", "assert f1 in db.findings assert len(db.findings) == 2 db.add(f2) assert", "'The power cord is unplugged.') f2 = dblib.lib.Finding('Monitor switched off',", "f1 in db.findings assert len(db.findings) == 2 db.add(f2) assert f2", "\"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() # regular cases db.add(f0) assert", "function.\"\"\" db = dblib.lib.BackyardDB() # regular cases db.add(f0) assert f0", "len(db.findings) == 3 db.remove(f1) assert f1 not in db.findings assert", "dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert f2 in db.findings assert", "import dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive", 
"exceptions with pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test function.\"\"\" db =", "2 # test exceptions with pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test", "assert len(db.findings) == 3 db.remove(f1) assert f1 not in db.findings", "dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is switched off.') def", "is switched off.') def test_add_remove(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB()", "switched off', 'my_PC', 'The monitor is switched off.') def test_add_remove():", "db.add(None) assert len(db.findings) == 3 db.remove(f1) assert f1 not in", "db.remove(f1) assert f1 not in db.findings assert len(db.findings) == 2", "in db.findings assert len(db.findings) == 1 db.add(f1) assert f1 in", "= dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor is switched off.')", "in db.findings assert len(db.findings) == 2 # test exceptions with", "assert len(db.findings) == 1 db.add(f1) assert f1 in db.findings assert", "f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is missing.')", "\"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert", "f0 in db.findings assert len(db.findings) == 1 db.add(f1) assert f1", "def test_add_remove(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() # regular cases", "db.add(f1) db.update(f1, f2) assert f2 in db.findings assert len(db.findings) ==", "dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.') f2 = dblib.lib.Finding('Monitor", "3 db.remove(f1) assert f1 not in db.findings assert len(db.findings) ==", "is unplugged.') f2 = dblib.lib.Finding('Monitor switched off', 'my_PC', 'The monitor", "of tests.\"\"\" import pytest import dblib.lib f0 = dblib.lib.Finding('CD spook',", "cases db.add(f0) assert f0 in db.findings assert len(db.findings) == 1", "f1 not in db.findings assert len(db.findings) == 2 # test", "tests.\"\"\" import pytest import dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC',", "# test exceptions with pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test function.\"\"\"", "len(db.findings) == 2 # test exceptions with pytest.raises(TypeError): db.add(1) def", "drive is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord", "'my_PC', 'The power cord is unplugged.') f2 = dblib.lib.Finding('Monitor switched", "'my_PC', 'The CD drive is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC',", "is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is", "assert f1 not in db.findings assert len(db.findings) == 2 #", "regular cases db.add(f0) assert f0 in db.findings assert len(db.findings) ==", "== 2 # test exceptions with pytest.raises(TypeError): db.add(1) def test_update():", "db = dblib.lib.BackyardDB() # regular cases db.add(f0) assert f0 in", "db.add(f1) assert f1 in db.findings assert len(db.findings) == 2 db.add(f2)", "db.findings assert len(db.findings) == 3 db.add(None) assert len(db.findings) == 3", "'The monitor is switched off.') def test_add_remove(): \"\"\"Test function.\"\"\" db", "== 3 db.add(None) assert len(db.findings) == 3 db.remove(f1) assert f1", "pytest.raises(TypeError): db.add(1) def test_update(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0)", "3 db.add(None) assert len(db.findings) == 3 db.remove(f1) assert f1 not", "f2 in db.findings assert len(db.findings) == 3 db.add(None) assert len(db.findings)", "assert f2 in db.findings assert len(db.findings) == 3 db.add(None) assert", "def 
test_update(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1,", "db.add(f0) assert f0 in db.findings assert len(db.findings) == 1 db.add(f1)", "test_update(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2)", "missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power cord is unplugged.')", "assert len(db.findings) == 3 db.add(None) assert len(db.findings) == 3 db.remove(f1)", "CD drive is missing.') f1 = dblib.lib.Finding('Unplugged', 'my_PC', 'The power", "db.findings assert len(db.findings) == 2 # test exceptions with pytest.raises(TypeError):", "test_add_remove(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() # regular cases db.add(f0)", "db.findings assert len(db.findings) == 1 db.add(f1) assert f1 in db.findings", "import pytest import dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The", "db.update(f1, f2) assert f2 in db.findings assert len(db.findings) == 2", "db.add(f2) assert f2 in db.findings assert len(db.findings) == 3 db.add(None)", "db = dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert f2 in", "db.add(1) def test_update(): \"\"\"Test function.\"\"\" db = dblib.lib.BackyardDB() db.add(f0) db.add(f1)", "= dblib.lib.Finding('CD spook', 'my_PC', 'The CD drive is missing.') f1", "== 1 db.add(f1) assert f1 in db.findings assert len(db.findings) ==", "== 3 db.remove(f1) assert f1 not in db.findings assert len(db.findings)", "= dblib.lib.BackyardDB() db.add(f0) db.add(f1) db.update(f1, f2) assert f2 in db.findings", "pytest import dblib.lib f0 = dblib.lib.Finding('CD spook', 'my_PC', 'The CD", "\"\"\"Collection of tests.\"\"\" import pytest import dblib.lib f0 = dblib.lib.Finding('CD", "1 db.add(f1) assert f1 in db.findings assert len(db.findings) == 2" ]
[ "for license information. # -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer from", "for i in result] @Completer def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs):", "sub = get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name', None) management_group =", "result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else:", "in result] @Completer def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): # pylint:", "the MIT License. See License.txt in the project root for", "namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models", "import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): # pylint:", "= cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name', None)", "the project root for license information. # -------------------------------------------------------------------------------------------- from azure.cli.core.decorators", "query_options = QueryOptions(top=2000) return [metadata.name for metadata in client.list(query_options) if", "QueryOptions query_options = QueryOptions(top=2000) return [metadata.name for metadata in client.list(query_options)", "= QueryOptions(top=2000) return [metadata.name for metadata in client.list(query_options) if metadata.name.startswith(prefix)]", "rg = getattr(namespace, 'resource_group_name', None) management_group = getattr(namespace, 'management_group_name', None)", "get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name', None) management_group = getattr(namespace, 'management_group_name',", "client = cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name',", "# pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions", "# pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg", "pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions query_options", "result] @Completer def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument", "import Completer from azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory import cf_policy_insights", "azure.mgmt.policyinsights.models import QueryOptions query_options = QueryOptions(top=2000) return [metadata.name for metadata", "= get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name', None) management_group = getattr(namespace,", "See License.txt in the project root for license information. 
#", "getattr(namespace, 'management_group_name', None) if rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif", "._client_factory import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): #", "management_group = getattr(namespace, 'management_group_name', None) if rg: result = client.remediations.list_for_resource_group(subscription_id=sub,", "elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub) return", "information. # -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory import", "= client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i in result] @Completer def", "from azure.mgmt.policyinsights.models import QueryOptions query_options = QueryOptions(top=2000) return [metadata.name for", "cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions query_options = QueryOptions(top=2000) return [metadata.name", "= cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions query_options = QueryOptions(top=2000) return", "-------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory import get_subscription_id from", "None) management_group = getattr(namespace, 'management_group_name', None) if rg: result =", "from ._client_factory import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs):", "cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument", "client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i in", "# Copyright (c) Microsoft Corporation. All rights reserved. # Licensed", "disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions query_options =", "MIT License. See License.txt in the project root for license", "'resource_group_name', None) management_group = getattr(namespace, 'management_group_name', None) if rg: result", "= getattr(namespace, 'resource_group_name', None) management_group = getattr(namespace, 'management_group_name', None) if", "reserved. # Licensed under the MIT License. See License.txt in", "get_subscription_id from ._client_factory import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix, namespace,", "else: result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i in result]", "result = client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name for", "license information. 
# -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory", "i in result] @Completer def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): #", "Corporation. All rights reserved. # Licensed under the MIT License.", "# Licensed under the MIT License. See License.txt in the", "-------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. #", "import QueryOptions query_options = QueryOptions(top=2000) return [metadata.name for metadata in", "result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i in result] @Completer", "@Completer def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client", "License.txt in the project root for license information. # --------------------------------------------------------------------------------------------", "resource_group_name=rg) elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub)", "**kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import", "in the project root for license information. # -------------------------------------------------------------------------------------------- from", "[i.name for i in result] @Completer def get_policy_metadata_completion_list(cmd, prefix, namespace,", "client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else: result =", "disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg = getattr(namespace,", "License. See License.txt in the project root for license information.", "client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i in result] @Completer def get_policy_metadata_completion_list(cmd,", "# -------------------------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved.", "azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory import", "namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub =", "root for license information. 
# -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer", "Completer from azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory import cf_policy_insights @Completer", "get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata", "@Completer def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client", "pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg =", "None) if rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result", "under the MIT License. See License.txt in the project root", "from azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory import cf_policy_insights @Completer def", "import get_subscription_id from ._client_factory import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd, prefix,", "from azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory", "client = cf_policy_insights(cmd.cli_ctx).policy_metadata from azure.mgmt.policyinsights.models import QueryOptions query_options = QueryOptions(top=2000)", "(c) Microsoft Corporation. All rights reserved. # Licensed under the", "All rights reserved. # Licensed under the MIT License. See", "**kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx)", "= getattr(namespace, 'management_group_name', None) if rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg)", "getattr(namespace, 'resource_group_name', None) management_group = getattr(namespace, 'management_group_name', None) if rg:", "'management_group_name', None) if rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group:", "= client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else: result", "management_group: result = client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name", "def get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client =", "Microsoft Corporation. All rights reserved. # Licensed under the MIT", "Licensed under the MIT License. See License.txt in the project", "def get_policy_metadata_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client =", "rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result = client.remediations.list_for_management_group(management_group_id=management_group)", "rights reserved. # Licensed under the MIT License. 
See License.txt", "azure.cli.core.commands.client_factory import get_subscription_id from ._client_factory import cf_policy_insights @Completer def get_policy_remediation_completion_list(cmd,", "prefix, namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx) sub", "prefix, namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx).policy_metadata from", "get_policy_remediation_completion_list(cmd, prefix, namespace, **kwargs): # pylint: disable=unused-argument client = cf_policy_insights(cmd.cli_ctx)", "if rg: result = client.remediations.list_for_resource_group(subscription_id=sub, resource_group_name=rg) elif management_group: result =", "= client.remediations.list_for_management_group(management_group_id=management_group) else: result = client.remediations.list_for_subscription(subscription_id=sub) return [i.name for i", "# -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import Completer from azure.cli.core.commands.client_factory import get_subscription_id", "return [i.name for i in result] @Completer def get_policy_metadata_completion_list(cmd, prefix,", "project root for license information. # -------------------------------------------------------------------------------------------- from azure.cli.core.decorators import", "cf_policy_insights(cmd.cli_ctx) sub = get_subscription_id(cmd.cli_ctx) rg = getattr(namespace, 'resource_group_name', None) management_group", "Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under" ]
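# Completers like the two above are attached to CLI arguments in the command
# module's load_arguments hook; a sketch (the command group and argument names
# here are illustrative, not taken from this file):
def load_arguments(self, _):
    with self.argument_context('policy remediation') as c:
        c.argument('remediation_name', options_list=['--name', '-n'],
                   completer=get_policy_remediation_completion_list)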
[ "), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False,", "(\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"),", "(\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"),", "(\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"),", "default=\"%d-%m-%Y\", max_length=50, ), ), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ),", "(\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready to import\"), (\"done\", \"Import complete\"),", "), ), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"),", "upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\" ), ), ( \"state\", models.CharField(", "# -*- coding: utf-8 -*- # Generated by Django 1.10.4", "models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"),", "verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100,", "import django.db.models.deletion import django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies =", "\"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\",", "(\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"),", "], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True,", "max_length=50, ), ), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ),", "), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), (", "class Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel(", "unique=True ), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\",", "ready to import\"), (\"done\", \"Import complete\"), ], default=\"pending\", max_length=20, ),", "\"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\",", "\"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\",", "by Django 1.10.4 on 2017-02-25 22:22 from __future__ import unicode_literals", "( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (\"column_number\",", "default=None, max_length=20, null=True, verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)),", "django.db.models.deletion import 
django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies = [(\"hordak\",", "models.CharField( blank=True, choices=[ (None, \"-- Do not import --\"), (\"date\",", "\"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\",", "models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[", "(\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money in only)\"), (\"amount_in\",", "), ( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First line of file contains", "utf-8 -*- # Generated by Django 1.10.4 on 2017-02-25 22:22", "default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ),", "headings\" ), ), ( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to", "models import django.db.models.deletion import django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies", "(\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"),", "import\" ), ), ( \"state\", models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\",", "on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ),", "], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"),", "(\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"),", "( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ), ( \"to_field\", models.CharField(", "unicode_literals from django.db import migrations, models import django.db.models.deletion import django.utils.timezone", "verbose_name=\"ID\" ), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ),", "(\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"),", "on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ (", "\"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()),", "( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ], ), migrations.CreateModel(", "Notes\"), ], default=None, max_length=20, null=True, verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True,", "\"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), ( \"uuid\",", "verbose_name=\"CSV file to import\" ), ), ( \"state\", models.CharField( choices=[", "\"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", 
\"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\",", "out only)\"), (\"description\", \"Description / Notes\"), ], default=None, max_length=20, null=True,", "[ migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False,", "import migrations, models import django.db.models.deletion import django.utils.timezone import django_smalluuid.models class", "name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ),", "\"Uploaded, ready to import\"), (\"done\", \"Import complete\"), ], default=\"pending\", max_length=20,", "( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"),", "not import --\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money", "(\"example\", models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\",", "choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\",", "to=\"hordak.StatementImport\" ), ), ], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\",", "blank=True, choices=[ (None, \"-- Do not import --\"), (\"date\", \"Date\"),", "contains headings\" ), ), ( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file", "import --\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money in", "models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ), ( \"to_field\", models.CharField( blank=True, choices=[", "\"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\",", "(\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"),", "(\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ),", "], default=\"pending\", max_length=20, ), ), ( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\",", "(\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"),", "# Generated by Django 1.10.4 on 2017-02-25 22:22 from __future__", "(\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"),", "(\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"),", "auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(),", "( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), ( \"timestamp\",", "\"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\",", "(\"%m.%Y.%d\", 
\"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"),", "in only)\"), (\"amount_in\", \"Amount (money out only)\"), (\"description\", \"Description /", "), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ),", "file to import\" ), ), ( \"state\", models.CharField( choices=[ (\"pending\",", "\"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\",", "import django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")]", "(\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"),", "models.BooleanField( default=True, verbose_name=\"First line of file contains headings\" ), ),", "), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\",", "(\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"),", "\"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\",", "), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\", models.BooleanField( default=True,", "(\"amount_out\", \"Amount (money in only)\"), (\"amount_in\", \"Amount (money out only)\"),", "\"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\",", "models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First line of", "line of file contains headings\" ), ), ( \"file\", models.FileField(", "models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]},", "(\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"),", "max_length=100, verbose_name=\"Column\"), ), ( \"to_field\", models.CharField( blank=True, choices=[ (None, \"--", "[(\"hordak\", \"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\",", "/ Notes\"), ], default=None, max_length=20, null=True, verbose_name=\"Is\", ), ), (\"example\",", "import django_smalluuid.models class Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")] operations =", "\"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ), ( \"to_field\", models.CharField( blank=True,", "( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\" ), ),", "Generated by Django 1.10.4 on 2017-02-25 22:22 from __future__ import", "22:22 from __future__ import unicode_literals from django.db import migrations, models", "dependencies = [(\"hordak\", 
\"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel( name=\"TransactionImport\", fields=[", "(\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"),", "(\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"),", "(\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"),", "), ), ( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\"", "\"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\",", "import\"), (\"done\", \"Import complete\"), ], default=\"pending\", max_length=20, ), ), (", "\"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\",", "django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")] operations", "\"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\",", "\"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\" ), ), (", "(\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ), ), (", "(\"description\", \"Description / Notes\"), ], default=None, max_length=20, null=True, verbose_name=\"Is\", ),", "(\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money in only)\"), (\"amount_in\", \"Amount (money", "__future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion", "import unicode_literals from django.db import migrations, models import django.db.models.deletion import", "operations = [ migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField( auto_created=True,", "(\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"),", "<reponame>CodeBrew-LTD/django-hordak # -*- coding: utf-8 -*- # Generated by Django", "), ), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set(", "(\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"),", "( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First line of file contains headings\"", "\"has_headings\", models.BooleanField( default=True, verbose_name=\"First line of file contains headings\" ),", "\"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\",", "\"to_field\", models.CharField( blank=True, choices=[ (None, \"-- Do not import --\"),", "), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\",", "models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready to 
import\"), (\"done\",", "( \"to_field\", models.CharField( blank=True, choices=[ (None, \"-- Do not import", "\"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\",", "(\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\",", "on 2017-02-25 22:22 from __future__ import unicode_literals from django.db import", "\"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ],", "(\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"),", "= [ migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True,", "), ( \"to_field\", models.CharField( blank=True, choices=[ (None, \"-- Do not", "default=True, verbose_name=\"First line of file contains headings\" ), ), (", "\"Amount (money in only)\"), (\"amount_in\", \"Amount (money out only)\"), (\"description\",", "models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ),", "name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ),", "complete\"), ], default=\"pending\", max_length=20, ), ), ( \"date_format\", models.CharField( choices=[", "models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ), ( \"to_field\",", "), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ], ),", "-*- # Generated by Django 1.10.4 on 2017-02-25 22:22 from", "\"Pending\"), (\"uploaded\", \"Uploaded, ready to import\"), (\"done\", \"Import complete\"), ],", "verbose_name=\"First line of file contains headings\" ), ), ( \"file\",", "\"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\",", "related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether(", "models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\" ), ), ( \"state\",", "), ( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\",", "to import\" ), ), ( \"state\", models.CharField( choices=[ (\"pending\", \"Pending\"),", "only)\"), (\"amount_in\", \"Amount (money out only)\"), (\"description\", \"Description / Notes\"),", "choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready to import\"), (\"done\", \"Import", "\"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\",", "), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ),", "verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey(", "only)\"), 
(\"description\", \"Description / Notes\"), ], default=None, max_length=20, null=True, verbose_name=\"Is\",", "file contains headings\" ), ), ( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV", "), ( \"state\", models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready", "\"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\",", "Django 1.10.4 on 2017-02-25 22:22 from __future__ import unicode_literals from", "serialize=False, verbose_name=\"ID\" ), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True", "default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ),", "from __future__ import unicode_literals from django.db import migrations, models import", "(\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"),", "\"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ], ), migrations.CreateModel( name=\"TransactionImportColumn\",", "migrations, models import django.db.models.deletion import django.utils.timezone import django_smalluuid.models class Migration(migrations.Migration):", "\"-- Do not import --\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\",", "Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel( name=\"TransactionImport\",", "[\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"), (\"transaction_import\", \"to_field\")]", "migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\"", "), ), ( \"state\", models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded,", "\"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ), ),", "\"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ), ), ( \"hordak_import\",", "(\"done\", \"Import complete\"), ], default=\"pending\", max_length=20, ), ), ( \"date_format\",", "max_length=20, null=True, verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)), (", "editable=False), ), ( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First line of file", "), ), ( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"),", "), ( \"file\", models.FileField( upload_to=\"transaction_imports\", verbose_name=\"CSV file to import\" ),", "(\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"),", "( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (", "(\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"),", "\"mm.yy.dd\"), (\"%y-%d-%m\", 
\"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\",", "(\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ), ), ( \"hordak_import\", models.ForeignKey(", "django.db import migrations, models import django.db.models.deletion import django.utils.timezone import django_smalluuid.models", "\"state\", models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready to import\"),", "auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\",", "(\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"),", "of file contains headings\" ), ), ( \"file\", models.FileField( upload_to=\"transaction_imports\",", "\"Amount (money out only)\"), (\"description\", \"Description / Notes\"), ], default=None,", "primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True,", "(money out only)\"), (\"description\", \"Description / Notes\"), ], default=None, max_length=20,", "(\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"),", "null=True, verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\",", "\"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\",", "models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField(", "django_smalluuid.models class Migration(migrations.Migration): dependencies = [(\"hordak\", \"0010_auto_20161216_1202\")] operations = [", "\"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\",", "\"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField(", "), ), (\"example\", models.CharField(blank=True, default=\"\", max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE,", "= [(\"hordak\", \"0010_auto_20161216_1202\")] operations = [ migrations.CreateModel( name=\"TransactionImport\", fields=[ (", "(\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"),", "models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()), (", "coding: utf-8 -*- # Generated by Django 1.10.4 on 2017-02-25", "\"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"), (\"%y/%d/%m\",", "to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\",", "\"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\": [\"transaction_import\",", 
"serialize=False, verbose_name=\"ID\" ), ), (\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\",", "\"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"), (\"%Y-%m-%d\",", "verbose_name=\"Column\"), ), ( \"to_field\", models.CharField( blank=True, choices=[ (None, \"-- Do", "editable=False, unique=True ), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), (", "(money in only)\"), (\"amount_in\", \"Amount (money out only)\"), (\"description\", \"Description", "(\"uploaded\", \"Uploaded, ready to import\"), (\"done\", \"Import complete\"), ], default=\"pending\",", "options={\"ordering\": [\"transaction_import\", \"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"), (\"transaction_import\",", "(\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\", \"mm-yy-dd\"), (\"%m/%y/%d\", \"mm/yy/dd\"), (\"%m.%y.%d\", \"mm.yy.dd\"), (\"%y-%d-%m\", \"yy-dd-mm\"),", "migrations.CreateModel( name=\"TransactionImport\", fields=[ ( \"id\", models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\"", "), ), ], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField(", "\"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\",", "default=\"pending\", max_length=20, ), ), ( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"),", "Do not import --\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount", "\"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"), (\"%m/%d/%Y\",", "\"column_number\"]}, ), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"), (\"transaction_import\", \"to_field\")] ),", "\"Description / Notes\"), ], default=None, max_length=20, null=True, verbose_name=\"Is\", ), ),", "to import\"), (\"done\", \"Import complete\"), ], default=\"pending\", max_length=20, ), ),", "), ), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\" ), ), ],", "\"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money in only)\"), (\"amount_in\", \"Amount", "max_length=200)), ( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ],", "migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"), (\"transaction_import\", \"to_field\")] ), ), ]", "\"yy-dd-mm\"), (\"%y/%d/%m\", \"yy/dd/mm\"), (\"%y.%d.%m\", \"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\",", "--\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"), (\"amount_out\", \"Amount (money in only)\"),", "(None, \"-- Do not import --\"), (\"date\", \"Date\"), (\"amount\", \"Amount\"),", "django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False),", "fields=[ ( \"id\", 
models.AutoField( auto_created=True, primary_key=True, serialize=False, verbose_name=\"ID\" ), ),", "\"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First line", "\"yyyy.dd.mm\"), (\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\",", "( \"transaction_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, related_name=\"columns\", to=\"hordak.TransactionImport\", ), ), ], options={\"ordering\":", "\"Import complete\"), ], default=\"pending\", max_length=20, ), ), ( \"date_format\", models.CharField(", "2017-02-25 22:22 from __future__ import unicode_literals from django.db import migrations,", "(\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"), (\"%d.%m.%y\", \"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"),", "\"dd.mm.yy\"), (\"%d-%y-%m\", \"dd-yy-mm\"), (\"%d/%y/%m\", \"dd/yy/mm\"), (\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\",", "], default=None, max_length=20, null=True, verbose_name=\"Is\", ), ), (\"example\", models.CharField(blank=True, default=\"\",", "default=\"\", max_length=100, verbose_name=\"Column\"), ), ( \"to_field\", models.CharField( blank=True, choices=[ (None,", "\"yy.dd.mm\"), (\"%y-%m-%d\", \"yy-mm-dd\"), (\"%y/%m/%d\", \"yy/mm/dd\"), (\"%y.%m.%d\", \"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50,", "(\"%Y-%m-%d\", \"yyyy-mm-dd\"), (\"%Y/%m/%d\", \"yyyy/mm/dd\"), (\"%Y.%m.%d\", \"yyyy.mm.dd\"), (\"%d-%m-%y\", \"dd-mm-yy\"), (\"%d/%m/%y\", \"dd/mm/yy\"),", "-*- coding: utf-8 -*- # Generated by Django 1.10.4 on", "), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\", models.BooleanField(", "max_length=20, ), ), ( \"date_format\", models.CharField( choices=[ (\"%d-%m-%Y\", \"dd-mm-yyyy\"), (\"%d/%m/%Y\",", "\"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\",", "\"yy.mm.dd\"), ], default=\"%d-%m-%Y\", max_length=50, ), ), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE,", "choices=[ (None, \"-- Do not import --\"), (\"date\", \"Date\"), (\"amount\",", "\"Amount\"), (\"amount_out\", \"Amount (money in only)\"), (\"amount_in\", \"Amount (money out", "(\"amount_in\", \"Amount (money out only)\"), (\"description\", \"Description / Notes\"), ],", "), migrations.AlterUniqueTogether( name=\"transactionimportcolumn\", unique_together=set( [(\"transaction_import\", \"column_number\"), (\"transaction_import\", \"to_field\")] ), ),", "], default=\"%d-%m-%Y\", max_length=50, ), ), ( \"hordak_import\", models.ForeignKey( on_delete=django.db.models.deletion.CASCADE, to=\"hordak.StatementImport\"", "(\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\", \"dd.yyyy.mm\"), (\"%m-%d-%Y\", \"mm-dd-yyyy\"),", "\"mm-dd-yyyy\"), (\"%m/%d/%Y\", \"mm/dd/yyyy\"), (\"%m.%d.%Y\", \"mm.dd.yyyy\"), (\"%m-%Y-%d\", \"mm-yyyy-dd\"), (\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\",", "( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now, editable=False), ), ( \"has_headings\", models.BooleanField( default=True, verbose_name=\"First", "1.10.4 on 2017-02-25 22:22 from __future__ import unicode_literals from django.db", "\"dd/yy/mm\"), 
(\"%d.%y.%m\", \"dd.yy.mm\"), (\"%m-%d-%y\", \"mm-dd-yy\"), (\"%m/%d/%y\", \"mm/dd/yy\"), (\"%m.%d.%y\", \"mm.dd.yy\"), (\"%m-%y-%d\",", "( \"state\", models.CharField( choices=[ (\"pending\", \"Pending\"), (\"uploaded\", \"Uploaded, ready to", "from django.db import migrations, models import django.db.models.deletion import django.utils.timezone import", "\"dd-mm-yyyy\"), (\"%d/%m/%Y\", \"dd/mm/yyyy\"), (\"%d.%m.%Y\", \"dd.mm.yyyy\"), (\"%d-%Y-%m\", \"dd-yyyy-mm\"), (\"%d/%Y/%m\", \"dd/yyyy/mm\"), (\"%d.%Y.%m\",", "primary_key=True, serialize=False, verbose_name=\"ID\" ), ), ( \"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False,", "(\"%m/%Y/%d\", \"mm/yyyy/dd\"), (\"%m.%Y.%d\", \"mm.yyyy.dd\"), (\"%Y-%d-%m\", \"yyyy-dd-mm\"), (\"%Y/%d/%m\", \"yyyy/dd/mm\"), (\"%Y.%d.%m\", \"yyyy.dd.mm\"),", "(\"column_number\", models.PositiveSmallIntegerField()), ( \"column_heading\", models.CharField(blank=True, default=\"\", max_length=100, verbose_name=\"Column\"), ), (", "\"uuid\", django_smalluuid.models.SmallUUIDField( default=django_smalluuid.models.UUIDDefault(), editable=False, unique=True ), ), ( \"timestamp\", models.DateTimeField(default=django.utils.timezone.now,", "), ], ), migrations.CreateModel( name=\"TransactionImportColumn\", fields=[ ( \"id\", models.AutoField( auto_created=True," ]
[ "Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def send_welcome(msg: types.Message):", "bot\" bot = Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async", "from aiogram import Bot, types from aiogram.dispatcher import Dispatcher from", "await msg.answer('Привет!') else: await msg.answer('Я не понимаю') if __name__ ==", "def get_text_messages(msg: types.Message): if msg.text.lower() == 'привет': await msg.answer('Привет!') else:", "async def get_text_messages(msg: types.Message): if msg.text.lower() == 'привет': await msg.answer('Привет!')", "you bot\" bot = Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help'])", "executor TOKEN = \"Token for you bot\" bot = Bot(token=TOKEN)", "пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message): if msg.text.lower() == 'привет':", "types from aiogram.dispatcher import Dispatcher from aiogram.utils import executor TOKEN", "for you bot\" bot = Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start',", "msg.answer('Привет!') else: await msg.answer('Я не понимаю') if __name__ == '__main__':", "def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg:", "else: await msg.answer('Я не понимаю') if __name__ == '__main__': executor.start_polling(dp)", "'help']) async def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async", "import Dispatcher from aiogram.utils import executor TOKEN = \"Token for", "aiogram.utils import executor TOKEN = \"Token for you bot\" bot", "from aiogram.dispatcher import Dispatcher from aiogram.utils import executor TOKEN =", "types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message): if", "send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message):", "await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message): if msg.text.lower()", "aiogram import Bot, types from aiogram.dispatcher import Dispatcher from aiogram.utils", "TOKEN = \"Token for you bot\" bot = Bot(token=TOKEN) dp", "@dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message): if msg.text.lower() == 'привет': await", "= \"Token for you bot\" bot = Bot(token=TOKEN) dp =", "= Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def send_welcome(msg:", "msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def get_text_messages(msg: types.Message): if msg.text.lower() ==", "'привет': await msg.answer('Привет!') else: await msg.answer('Я не понимаю') if __name__", "dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def send_welcome(msg: types.Message): await", "Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро 
пожаловать,{msg.from_user.first_name}')", "if msg.text.lower() == 'привет': await msg.answer('Привет!') else: await msg.answer('Я не", "aiogram.dispatcher import Dispatcher from aiogram.utils import executor TOKEN = \"Token", "import Bot, types from aiogram.dispatcher import Dispatcher from aiogram.utils import", "@dp.message_handler(command=['start', 'help']) async def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text'])", "== 'привет': await msg.answer('Привет!') else: await msg.answer('Я не понимаю') if", "bot = Bot(token=TOKEN) dp = Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def", "\"Token for you bot\" bot = Bot(token=TOKEN) dp = Dispatcher(bot)", "import executor TOKEN = \"Token for you bot\" bot =", "Bot, types from aiogram.dispatcher import Dispatcher from aiogram.utils import executor", "Dispatcher from aiogram.utils import executor TOKEN = \"Token for you", "msg.text.lower() == 'привет': await msg.answer('Привет!') else: await msg.answer('Я не понимаю')", "types.Message): if msg.text.lower() == 'привет': await msg.answer('Привет!') else: await msg.answer('Я", "get_text_messages(msg: types.Message): if msg.text.lower() == 'привет': await msg.answer('Привет!') else: await", "= Dispatcher(bot) @dp.message_handler(command=['start', 'help']) async def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро", "async def send_welcome(msg: types.Message): await msg.reply_to_message(f'Добро пожаловать,{msg.from_user.first_name}') @dp.message_handler(content_types=['text']) async def", "from aiogram.utils import executor TOKEN = \"Token for you bot\"" ]
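# --- Aside (added; not part of the original bot) -----------------------------
# In aiogram 2.x the same handlers can be registered without decorators via
# Dispatcher.register_message_handler, which is sometimes easier to test:
#
#     dp.register_message_handler(send_welcome, commands=['start', 'help'])
#     dp.register_message_handler(get_text_messages, content_types=['text'])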
[ "return force_text(obj) return super(LazyEncoder, self).default(obj) def json_dumps(data): return json.dumps(data, cls=LazyEncoder)", "def default(self, obj): if isinstance(obj, Promise): return force_text(obj) return super(LazyEncoder,", "'{1}' class.\".format( module_path, class_name ) raise ImportError(message) return getattr(module_itself, class_name)", "from PIL import Image from PIL import ImageFile except ImportError:", "try: import Image import ImageFile except ImportError: return False return", "< 2: message = \"'{0}' is not a complete Python", "LazyEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Promise): return force_text(obj) return", "ImportError(message) return getattr(module_itself, class_name) def is_module_image_installed(): try: from PIL import", "module_path = '.'.join(path_bits) module_itself = import_module(module_path) if not hasattr(module_itself, class_name):", "if not hasattr(module_itself, class_name): message = \"The Python module '{0}'", "if len(path_bits) < 2: message = \"'{0}' is not a", "import import_module try: from django.utils.encoding import force_text except ImportError: from", "message = \"The Python module '{0}' has no '{1}' class.\".format(", "from PIL import ImageFile except ImportError: try: import Image import", "except ImportError: from django.utils.encoding import force_unicode as force_text from django.utils.functional", "ImportError: return False return True class LazyEncoder(json.JSONEncoder): def default(self, obj):", "except ImportError: return False return True class LazyEncoder(json.JSONEncoder): def default(self,", "not hasattr(module_itself, class_name): message = \"The Python module '{0}' has", "ImageFile except ImportError: return False return True class LazyEncoder(json.JSONEncoder): def", "import Image from PIL import ImageFile except ImportError: try: import", "class_name = path_bits.pop() module_path = '.'.join(path_bits) module_itself = import_module(module_path) if", "= \"'{0}' is not a complete Python path.\".format(path) raise ImproperlyConfigured(message)", "Python module '{0}' has no '{1}' class.\".format( module_path, class_name )", "module_itself = import_module(module_path) if not hasattr(module_itself, class_name): message = \"The", "\"The Python module '{0}' has no '{1}' class.\".format( module_path, class_name", "no '{1}' class.\".format( module_path, class_name ) raise ImportError(message) return getattr(module_itself,", "ImproperlyConfigured from importlib import import_module try: from django.utils.encoding import force_text", "<reponame>danlgz/django-wysiwyg-redactor<filename>redactor/utils.py from django.core.exceptions import ImproperlyConfigured from importlib import import_module try:", "force_text from django.utils.functional import Promise import json def import_class(path): path_bits", "ImproperlyConfigured(message) class_name = path_bits.pop() module_path = '.'.join(path_bits) module_itself = import_module(module_path)", "2: message = \"'{0}' is not a complete Python path.\".format(path)", "json def import_class(path): path_bits = path.split('.') if len(path_bits) < 2:", "import force_text except ImportError: from django.utils.encoding import force_unicode as force_text", "obj): if isinstance(obj, Promise): return force_text(obj) return super(LazyEncoder, self).default(obj) def", "'{0}' has no '{1}' class.\".format( module_path, class_name ) raise ImportError(message)", "a complete Python path.\".format(path) raise ImproperlyConfigured(message) class_name = path_bits.pop() 
module_path", "= import_module(module_path) if not hasattr(module_itself, class_name): message = \"The Python", "class.\".format( module_path, class_name ) raise ImportError(message) return getattr(module_itself, class_name) def", "PIL import ImageFile except ImportError: try: import Image import ImageFile", "import force_unicode as force_text from django.utils.functional import Promise import json", "class_name): message = \"The Python module '{0}' has no '{1}'", "django.utils.functional import Promise import json def import_class(path): path_bits = path.split('.')", "import ImproperlyConfigured from importlib import import_module try: from django.utils.encoding import", "django.core.exceptions import ImproperlyConfigured from importlib import import_module try: from django.utils.encoding", "django.utils.encoding import force_text except ImportError: from django.utils.encoding import force_unicode as", "has no '{1}' class.\".format( module_path, class_name ) raise ImportError(message) return", "try: from PIL import Image from PIL import ImageFile except", "= \"The Python module '{0}' has no '{1}' class.\".format( module_path,", "except ImportError: try: import Image import ImageFile except ImportError: return", "Promise import json def import_class(path): path_bits = path.split('.') if len(path_bits)", "raise ImproperlyConfigured(message) class_name = path_bits.pop() module_path = '.'.join(path_bits) module_itself =", "return False return True class LazyEncoder(json.JSONEncoder): def default(self, obj): if", "path_bits.pop() module_path = '.'.join(path_bits) module_itself = import_module(module_path) if not hasattr(module_itself,", "'.'.join(path_bits) module_itself = import_module(module_path) if not hasattr(module_itself, class_name): message =", "getattr(module_itself, class_name) def is_module_image_installed(): try: from PIL import Image from", "if isinstance(obj, Promise): return force_text(obj) return super(LazyEncoder, self).default(obj) def json_dumps(data):", "message = \"'{0}' is not a complete Python path.\".format(path) raise", "Python path.\".format(path) raise ImproperlyConfigured(message) class_name = path_bits.pop() module_path = '.'.join(path_bits)", "path.\".format(path) raise ImproperlyConfigured(message) class_name = path_bits.pop() module_path = '.'.join(path_bits) module_itself", "not a complete Python path.\".format(path) raise ImproperlyConfigured(message) class_name = path_bits.pop()", "module '{0}' has no '{1}' class.\".format( module_path, class_name ) raise", "from django.core.exceptions import ImproperlyConfigured from importlib import import_module try: from", "Image import ImageFile except ImportError: return False return True class", "isinstance(obj, Promise): return force_text(obj) return super(LazyEncoder, self).default(obj) def json_dumps(data): return", "complete Python path.\".format(path) raise ImproperlyConfigured(message) class_name = path_bits.pop() module_path =", "import Image import ImageFile except ImportError: return False return True", "importlib import import_module try: from django.utils.encoding import force_text except ImportError:", "False return True class LazyEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj,", "import ImageFile except ImportError: try: import Image import ImageFile except", "True class LazyEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Promise): return", "module_path, class_name ) raise ImportError(message) return getattr(module_itself, class_name) def 
is_module_image_installed():", "default(self, obj): if isinstance(obj, Promise): return force_text(obj) return super(LazyEncoder, self).default(obj)", "is_module_image_installed(): try: from PIL import Image from PIL import ImageFile", "as force_text from django.utils.functional import Promise import json def import_class(path):", "return True class LazyEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Promise):", "def is_module_image_installed(): try: from PIL import Image from PIL import", "is not a complete Python path.\".format(path) raise ImproperlyConfigured(message) class_name =", "= '.'.join(path_bits) module_itself = import_module(module_path) if not hasattr(module_itself, class_name): message", "django.utils.encoding import force_unicode as force_text from django.utils.functional import Promise import", "hasattr(module_itself, class_name): message = \"The Python module '{0}' has no", "import ImageFile except ImportError: return False return True class LazyEncoder(json.JSONEncoder):", "force_unicode as force_text from django.utils.functional import Promise import json def", "path_bits = path.split('.') if len(path_bits) < 2: message = \"'{0}'", "from django.utils.encoding import force_unicode as force_text from django.utils.functional import Promise", "from django.utils.encoding import force_text except ImportError: from django.utils.encoding import force_unicode", "import Promise import json def import_class(path): path_bits = path.split('.') if", "PIL import Image from PIL import ImageFile except ImportError: try:", "ImageFile except ImportError: try: import Image import ImageFile except ImportError:", "ImportError: from django.utils.encoding import force_unicode as force_text from django.utils.functional import", "force_text except ImportError: from django.utils.encoding import force_unicode as force_text from", "return getattr(module_itself, class_name) def is_module_image_installed(): try: from PIL import Image", ") raise ImportError(message) return getattr(module_itself, class_name) def is_module_image_installed(): try: from", "ImportError: try: import Image import ImageFile except ImportError: return False", "from importlib import import_module try: from django.utils.encoding import force_text except", "Promise): return force_text(obj) return super(LazyEncoder, self).default(obj) def json_dumps(data): return json.dumps(data,", "import_module try: from django.utils.encoding import force_text except ImportError: from django.utils.encoding", "import json def import_class(path): path_bits = path.split('.') if len(path_bits) <", "import_class(path): path_bits = path.split('.') if len(path_bits) < 2: message =", "try: from django.utils.encoding import force_text except ImportError: from django.utils.encoding import", "raise ImportError(message) return getattr(module_itself, class_name) def is_module_image_installed(): try: from PIL", "def import_class(path): path_bits = path.split('.') if len(path_bits) < 2: message", "\"'{0}' is not a complete Python path.\".format(path) raise ImproperlyConfigured(message) class_name", "class LazyEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, Promise): return force_text(obj)", "len(path_bits) < 2: message = \"'{0}' is not a complete", "import_module(module_path) if not hasattr(module_itself, class_name): message = \"The Python module", "from django.utils.functional import Promise import json def import_class(path): path_bits =", "= path.split('.') if len(path_bits) < 2: message = \"'{0}' is", "= 
path_bits.pop() module_path = '.'.join(path_bits) module_itself = import_module(module_path) if not", "class_name ) raise ImportError(message) return getattr(module_itself, class_name) def is_module_image_installed(): try:", "path.split('.') if len(path_bits) < 2: message = \"'{0}' is not", "Image from PIL import ImageFile except ImportError: try: import Image", "class_name) def is_module_image_installed(): try: from PIL import Image from PIL" ]
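# --- Usage sketch (added; assumes a Django version that still provides
# force_text; the names below are illustrative, not part of the module) -------
# import_class resolves a dotted path to an attribute; json_dumps serializes
# Django lazy objects (Promise instances) by forcing them to text first.
if __name__ == '__main__':
    from django.utils.functional import lazy

    JSONEncoder = import_class('json.JSONEncoder')  # resolves a dotted path
    lazy_greeting = lazy(lambda: 'hello', str)()    # a Promise instance
    print(json_dumps({'greeting': lazy_greeting}))  # -> {"greeting": "hello"}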
[ "value of the process variable to be controlled, The time", "def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the PID command with automatic", "_boundCmd(self, cmd): \"\"\"Bounds the command within the range _cmdMin to", "LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A", "maximum command range. Commands calculated outside the cmdMin and cmdMax", "PID error terms and timer.\"\"\" self._errorIntegral = 0. self._errorPrevious =", "= self._cmdMin elif cmd > self._cmdMax: cmd = self._cmdMax return", "def getCmd(self, setPoint, procVar): \"\"\"Gets the PID command without time", "\"\"\"Sets the proportional, integral and derivative terms.\"\"\" self._kp = kp", "respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax = cmdMax self._boundRange = True", "self._boundRange = False self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime", "this repository for detailed # documentation. import time class TimedPID:", "= 0.): self._kp = kp self._ki = ki self._kd =", "= 0. self._lastCmdTime = time.time() def getCmd(self, setPoint, procVar): \"\"\"Gets", "desired process set point, procVar is the current value of", "TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR", "the time step.\"\"\" # Calculate error terms error = setPoint", "current value of the process variable to be controlled, timeStep", "rights # to use, copy, modify, merge, publish, distribute, sublicense,", "can be found at # https://github.com/DrGFreeman/TimedPID. Refer to this repository", "Return bound command return self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets", "kp = 1., ki = 0., kd = 0.): self._kp", "portions of the Software. # # THE SOFTWARE IS PROVIDED", "= cmdMin self._cmdMax = cmdMax self._boundRange = True def setGains(self,", "step calculation. setPoint is the desired process set point, procVar", "Calculate error terms error = setPoint - procVar self._errorIntegral +=", "controlled, timeStep is the time step.\"\"\" # Calculate error terms", "# # The above copyright notice and this permission notice", "= time.time() # Private methods def _boundCmd(self, cmd): \"\"\"Bounds the", "/ 2 * timeStep errorDerivative = (error - self._errorPrevious) /", "time.time() # Private methods def _boundCmd(self, cmd): \"\"\"Bounds the command", "- self._lastCmdTime # Set last time method was called to", "Constructor def __init__(self, kp = 1., ki = 0., kd", "time step currentTime = time.time() timeStep = currentTime - self._lastCmdTime", "calculated outside the cmdMin and cmdMax will be set to", "(error - self._errorPrevious) / timeStep # Set last error to", "and associated documentation files (the \"Software\"), to deal # in", "Software without restriction, including without limitation the rights # to", "last time method was called to current time self._lastCmdTime =", "and to permit persons to whom the Software is #", "the cmdMin and cmdMax will be set to cmdMin or", "copies of the Software, and to permit persons to whom", "hereby granted, free of charge, to any person obtaining a", "value of the process variable to be controlled, timeStep is", "this permission notice shall be included in all # copies", "timeStep # Set last error to current error self._errorPrevious =", "for detailed # documentation. import time class TimedPID: # Constructor", "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE", "distribute, sublicense, and/or sell # copies of the Software, and", "to be controlled. 
No time step is used (assumed =", "OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.", "defines a simple Proportional - Integral - Derivative (PID) #", "method was called to current time self._lastCmdTime = currentTime #", "HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER #", "No time step is used (assumed = 1).\"\"\" # Calculate", "getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets the PID command with a", "* timeStep errorDerivative = (error - self._errorPrevious) / timeStep #", "error = setPoint - procVar self._errorIntegral += error errorDerivative =", "= 1., ki = 0., kd = 0.): \"\"\"Sets the", "setPoint is the desired process set point, procVar is the", "1).\"\"\" # Calculate error terms error = setPoint - procVar", "used (assumed = 1).\"\"\" # Calculate error terms error =", "import time class TimedPID: # Constructor def __init__(self, kp =", "# Copyright (c) 2017 <NAME> <<EMAIL>> # # Permission is", "None self._boundRange = False self._errorIntegral = 0. self._errorPrevious = 0.", "python # implementation of my Arduino TimedPID library which can", "PID command with automatic time step calculation. setPoint is the", "OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH", "self._kp * error + self._ki * self._errorIntegral + \\ self._kd", "(PID) # controller with different time step calculation methods. This", "deal # in the Software without restriction, including without limitation", "self._kd = kd self._cmdMin = None self._cmdMax = None self._boundRange", "use, copy, modify, merge, publish, distribute, sublicense, and/or sell #", "= time.time() def getCmd(self, setPoint, procVar): \"\"\"Gets the PID command", "return self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets", "be included in all # copies or substantial portions of", "OR OTHER DEALINGS IN THE # SOFTWARE. # This module", "command without time step. setPoint is the desired process set", "the Software. # # THE SOFTWARE IS PROVIDED \"AS IS\",", "+ \\ self._kd * errorDerivative # Return bound command return", "= 0. self._errorPrevious = 0. self._lastCmdTime = time.time() def getCmd(self,", "timer.\"\"\" self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime = time.time()", "copy, modify, merge, publish, distribute, sublicense, and/or sell # copies", "# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR", "# This module defines a simple Proportional - Integral -", "with automatic time step calculation. setPoint is the desired process", "module defines a simple Proportional - Integral - Derivative (PID)", "is the time step.\"\"\" # Calculate error terms error =", "def __init__(self, kp = 1., ki = 0., kd =", "the proportional, integral and derivative terms.\"\"\" self._kp = kp self._ki", "to cmdMin or cmdMax respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax =", "software and associated documentation files (the \"Software\"), to deal #", "kp = 1., ki = 0., kd = 0.): \"\"\"Sets", "methods. This is a python # implementation of my Arduino", "Calculate time step currentTime = time.time() timeStep = currentTime -", "currentTime # Get command return self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self,", "self._lastCmdTime = time.time() def getCmd(self, setPoint, procVar): \"\"\"Gets the PID", "# THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF", "AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE # AUTHORS OR", "self._cmdMin = None self._cmdMax = None self._boundRange = False self._errorIntegral", "the Software without restriction, including without limitation the rights #", "# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF", "- procVar self._errorIntegral += error errorDerivative = error - self._errorPrevious", "self._errorPrevious = 0. self._lastCmdTime = time.time() # Private methods def", "error terms error = setPoint - procVar self._errorIntegral += error", "the PID command with a specified time step. setPoint is", "cmd < self._cmdMin: cmd = self._cmdMin elif cmd > self._cmdMax:", "\"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR #", "= None self._cmdMax = None self._boundRange = False self._errorIntegral =", "command within the range _cmdMin to _cmdMax.\"\"\" if self._boundRange: if", "errorDerivative = (error - self._errorPrevious) / timeStep # Set last", "time step calculation. setPoint is the desired process set point,", "= 0. self._lastCmdTime = time.time() # Private methods def _boundCmd(self,", "= True def setGains(self, kp = 1., ki = 0.,", "ki = 0., kd = 0.): self._kp = kp self._ki", "cmdMin or cmdMax respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax = cmdMax", "included in all # copies or substantial portions of the", "# of this software and associated documentation files (the \"Software\"),", "furnished to do so, subject to the following conditions: #", "to do so, subject to the following conditions: # #", "# The above copyright notice and this permission notice shall", "SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR", "a copy # of this software and associated documentation files", "OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF", "procVar is the current value of the process variable to", "a simple Proportional - Integral - Derivative (PID) # controller", "TimedPID library which can be found at # https://github.com/DrGFreeman/TimedPID. Refer", "= self._kp * error + self._ki * self._errorIntegral + \\", "MIT License # # Copyright (c) 2017 <NAME> <<EMAIL>> #", "= (error - self._errorPrevious) / timeStep # Set last error", "permission notice shall be included in all # copies or", "0. self._errorPrevious = 0. self._lastCmdTime = time.time() def getCmd(self, setPoint,", "cmdMax respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax = cmdMax self._boundRange =", "command cmd = self._kp * error + self._ki * self._errorIntegral", "simple Proportional - Integral - Derivative (PID) # controller with", "self._ki = ki self._kd = kd def reset(self): \"\"\"Resets the", "https://github.com/DrGFreeman/TimedPID. Refer to this repository for detailed # documentation. import", "# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO", "self._kd * errorDerivative # Return bound command return self._boundCmd(cmd) def", "is the current value of the process variable to be", "IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS", "NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE", "following conditions: # # The above copyright notice and this", "PID command without time step. 
setPoint is the desired process", "to deal # in the Software without restriction, including without", "conditions: # # The above copyright notice and this permission", "The time step is calculated as the time since the", "SOFTWARE OR THE USE OR OTHER DEALINGS IN THE #", "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell", "TimedPID: # Constructor def __init__(self, kp = 1., ki =", "# Calculate command cmd = self._kp * error + self._ki", "IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS", "cmdMin, cmdMax): \"\"\"Sets the maximum command range. Commands calculated outside", "self._boundRange: if cmd < self._cmdMin: cmd = self._cmdMin elif cmd", "= 0. self._errorPrevious = 0. self._lastCmdTime = time.time() # Private", "\"\"\"Bounds the command within the range _cmdMin to _cmdMax.\"\"\" if", "Set last error to current error self._errorPrevious = error #", "without time step. setPoint is the desired process set point,", "self._errorPrevious = 0. self._lastCmdTime = time.time() def getCmd(self, setPoint, procVar):", "FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN", "of the process variable to be controlled, timeStep is the", "/ timeStep # Set last error to current error self._errorPrevious", "# Calculate time step currentTime = time.time() timeStep = currentTime", "cmdMax self._boundRange = True def setGains(self, kp = 1., ki", "= error - self._errorPrevious # Set last error to current", "library which can be found at # https://github.com/DrGFreeman/TimedPID. Refer to", "time step.\"\"\" # Calculate error terms error = setPoint -", "- procVar self._errorIntegral += (error + self._errorPrevious) / 2 *", "procVar self._errorIntegral += error errorDerivative = error - self._errorPrevious #", "self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime = time.time() def", "cmd = self._cmdMin elif cmd > self._cmdMax: cmd = self._cmdMax", "methods def _boundCmd(self, cmd): \"\"\"Bounds the command within the range", "cmdMin self._cmdMax = cmdMax self._boundRange = True def setGains(self, kp", "and/or sell # copies of the Software, and to permit", "the rights # to use, copy, modify, merge, publish, distribute,", "self._errorIntegral += error errorDerivative = error - self._errorPrevious # Set", "all # copies or substantial portions of the Software. 
#", "< self._cmdMin: cmd = self._cmdMin elif cmd > self._cmdMax: cmd", "process variable to be controlled, timeStep is the time step.\"\"\"", "= kd def reset(self): \"\"\"Resets the PID error terms and", "notice and this permission notice shall be included in all", "command return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the PID", "is hereby granted, free of charge, to any person obtaining", "* error + self._ki * self._errorIntegral + \\ self._kd *", "= 0., kd = 0.): self._kp = kp self._ki =", "calculated as the time since the last call to the", "CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR", "= setPoint - procVar self._errorIntegral += error errorDerivative = error", "person obtaining a copy # of this software and associated", "# # Permission is hereby granted, free of charge, to", "without restriction, including without limitation the rights # to use,", "Return bound command return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets", "timeStep = currentTime - self._lastCmdTime # Set last time method", "0., kd = 0.): \"\"\"Sets the proportional, integral and derivative", "\\ self._kd * errorDerivative # Return bound command return self._boundCmd(cmd)", "subject to the following conditions: # # The above copyright", "different time step calculation methods. This is a python #", "was called to current time self._lastCmdTime = currentTime # Get", "self._kp = kp self._ki = ki self._kd = kd def", "time.time() def getCmd(self, setPoint, procVar): \"\"\"Gets the PID command without", "= None self._boundRange = False self._errorIntegral = 0. self._errorPrevious =", "WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN", "self._cmdMin = cmdMin self._cmdMax = cmdMax self._boundRange = True def", "\"\"\"Resets the PID error terms and timer.\"\"\" self._errorIntegral = 0.", "step currentTime = time.time() timeStep = currentTime - self._lastCmdTime #", "THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE", "terms error = setPoint - procVar self._errorIntegral += error errorDerivative", "self._boundRange = True def setGains(self, kp = 1., ki =", "\"\"\"Gets the PID command with a specified time step. setPoint", "of the process variable to be controlled, The time step", "time since the last call to the method.\"\"\" # Calculate", "or substantial portions of the Software. # # THE SOFTWARE", "<NAME> <<EMAIL>> # # Permission is hereby granted, free of", "last error to current error self._errorPrevious = error # Calculate", "self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the maximum command range.", "BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS", "currentTime - self._lastCmdTime # Set last time method was called", "FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL", "implementation of my Arduino TimedPID library which can be found", "OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR", "procVar): \"\"\"Gets the PID command without time step. setPoint is", "point, procVar is the current value of the process variable", "return self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the maximum command", "error terms error = setPoint - procVar self._errorIntegral += (error", "CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS", "set point, procVar is the current value of the process", "IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER", "setGains(self, kp = 1., ki = 0., kd = 0.):", "step calculation methods. 
This is a python # implementation of", "CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION", "# Permission is hereby granted, free of charge, to any", "of charge, to any person obtaining a copy # of", "This is a python # implementation of my Arduino TimedPID", "SOFTWARE. # This module defines a simple Proportional - Integral", "INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, #", "merge, publish, distribute, sublicense, and/or sell # copies of the", "* errorDerivative # Return bound command return self._boundCmd(cmd) def getCmdAutoStep(self,", "# Return bound command return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar):", "# # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY", "= kp self._ki = ki self._kd = kd self._cmdMin =", "cmdMax): \"\"\"Sets the maximum command range. Commands calculated outside the", "NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT", "= 1).\"\"\" # Calculate error terms error = setPoint -", "to the method.\"\"\" # Calculate time step currentTime = time.time()", "self._cmdMax = None self._boundRange = False self._errorIntegral = 0. self._errorPrevious", "repository for detailed # documentation. import time class TimedPID: #", "NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR", "as the time since the last call to the method.\"\"\"", "error self._errorPrevious = error # Calculate command cmd = self._kp", "self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets the", "reset(self): \"\"\"Resets the PID error terms and timer.\"\"\" self._errorIntegral =", "cmd): \"\"\"Bounds the command within the range _cmdMin to _cmdMax.\"\"\"", "0. self._lastCmdTime = time.time() def getCmd(self, setPoint, procVar): \"\"\"Gets the", "# MIT License # # Copyright (c) 2017 <NAME> <<EMAIL>>", "_cmdMax.\"\"\" if self._boundRange: if cmd < self._cmdMin: cmd = self._cmdMin", "the process variable to be controlled. No time step is", "timeStep is the time step.\"\"\" # Calculate error terms error", "# documentation. import time class TimedPID: # Constructor def __init__(self,", "LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER", "self._ki = ki self._kd = kd self._cmdMin = None self._cmdMax", "calculation. setPoint is the desired process set point, procVar is", "outside the cmdMin and cmdMax will be set to cmdMin", "self._lastCmdTime = time.time() # Private methods def _boundCmd(self, cmd): \"\"\"Bounds", "specified time step. setPoint is the desired process set point,", "so, subject to the following conditions: # # The above", "terms and timer.\"\"\" self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime", "AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #", "DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF", "bound command return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the", "* errorDerivative # Return bound command return self._boundCmd(cmd) def setCmdRange(self,", "time self._lastCmdTime = currentTime # Get command return self.getCmdStep(setPoint, procVar,", "self._cmdMin elif cmd > self._cmdMax: cmd = self._cmdMax return cmd", "the time since the last call to the method.\"\"\" #", "the following conditions: # # The above copyright notice and", "FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE", "variable to be controlled, timeStep is the time step.\"\"\" #", "return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the PID command", "documentation. 
import time class TimedPID: # Constructor def __init__(self, kp", "= kd self._cmdMin = None self._cmdMax = None self._boundRange =", "+= error errorDerivative = error - self._errorPrevious # Set last", "error # Calculate command cmd = self._kp * error +", "Integral - Derivative (PID) # controller with different time step", "THE USE OR OTHER DEALINGS IN THE # SOFTWARE. #", "is calculated as the time since the last call to", "be set to cmdMin or cmdMax respectively.\"\"\" self._cmdMin = cmdMin", "= cmdMax self._boundRange = True def setGains(self, kp = 1.,", "the process variable to be controlled, The time step is", "self._cmdMin: cmd = self._cmdMin elif cmd > self._cmdMax: cmd =", "0. self._errorPrevious = 0. self._lastCmdTime = time.time() # Private methods", "the Software, and to permit persons to whom the Software", "at # https://github.com/DrGFreeman/TimedPID. Refer to this repository for detailed #", "be controlled. No time step is used (assumed = 1).\"\"\"", "timedpid.py # Source: https://github.com/DrGFreeman/PyTools # # MIT License # #", "0., kd = 0.): self._kp = kp self._ki = ki", "# Calculate error terms error = setPoint - procVar self._errorIntegral", "process set point, procVar is the current value of the", "- Derivative (PID) # controller with different time step calculation", "the range _cmdMin to _cmdMax.\"\"\" if self._boundRange: if cmd <", "ki self._kd = kd self._cmdMin = None self._cmdMax = None", "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT", "persons to whom the Software is # furnished to do", "# controller with different time step calculation methods. This is", "procVar, timeStep) def getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets the PID", "proportional, integral and derivative terms.\"\"\" self._kp = kp self._ki =", "kp self._ki = ki self._kd = kd self._cmdMin = None", "associated documentation files (the \"Software\"), to deal # in the", "setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the maximum command range. Commands calculated", "Arduino TimedPID library which can be found at # https://github.com/DrGFreeman/TimedPID.", "since the last call to the method.\"\"\" # Calculate time", "MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN", "True def setGains(self, kp = 1., ki = 0., kd", "- Integral - Derivative (PID) # controller with different time", "Software. # # THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT", "the method.\"\"\" # Calculate time step currentTime = time.time() timeStep", "to any person obtaining a copy # of this software", "with a specified time step. setPoint is the desired process", "derivative terms.\"\"\" self._kp = kp self._ki = ki self._kd =", "command return self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self, setPoint, procVar, timeStep):", "in all # copies or substantial portions of the Software.", "of the Software, and to permit persons to whom the", "this software and associated documentation files (the \"Software\"), to deal", "time class TimedPID: # Constructor def __init__(self, kp = 1.,", "ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN", "BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY,", "- self._errorPrevious) / timeStep # Set last error to current", "found at # https://github.com/DrGFreeman/TimedPID. 
Refer to this repository for detailed", "0.): self._kp = kp self._ki = ki self._kd = kd", "= setPoint - procVar self._errorIntegral += (error + self._errorPrevious) /", "shall be included in all # copies or substantial portions", "a python # implementation of my Arduino TimedPID library which", "Software is # furnished to do so, subject to the", "setPoint, procVar): \"\"\"Gets the PID command without time step. setPoint", "PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR", "class TimedPID: # Constructor def __init__(self, kp = 1., ki", "detailed # documentation. import time class TimedPID: # Constructor def", "whom the Software is # furnished to do so, subject", "sublicense, and/or sell # copies of the Software, and to", "= currentTime - self._lastCmdTime # Set last time method was", "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY", "or cmdMax respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax = cmdMax self._boundRange", "substantial portions of the Software. # # THE SOFTWARE IS", "notice shall be included in all # copies or substantial", "# # Copyright (c) 2017 <NAME> <<EMAIL>> # # Permission", "False self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime = time.time()", "if cmd < self._cmdMin: cmd = self._cmdMin elif cmd >", "# timedpid.py # Source: https://github.com/DrGFreeman/PyTools # # MIT License #", "def getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets the PID command with", "value of the process variable to be controlled. No time", "do so, subject to the following conditions: # # The", "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,", "WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING", "Calculate command cmd = self._kp * error + self._ki *", "self._lastCmdTime # Set last time method was called to current", "my Arduino TimedPID library which can be found at #", "in the Software without restriction, including without limitation the rights", "# Source: https://github.com/DrGFreeman/PyTools # # MIT License # # Copyright", "controlled. No time step is used (assumed = 1).\"\"\" #", "terms error = setPoint - procVar self._errorIntegral += (error +", "= False self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime =", "= ki self._kd = kd def reset(self): \"\"\"Resets the PID", "def _boundCmd(self, cmd): \"\"\"Bounds the command within the range _cmdMin", "time step is calculated as the time since the last", "time method was called to current time self._lastCmdTime = currentTime", "# furnished to do so, subject to the following conditions:", "the desired process set point, procVar is the current value", "any person obtaining a copy # of this software and", "ARISING FROM, # OUT OF OR IN CONNECTION WITH THE", "SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,", "error errorDerivative = error - self._errorPrevious # Set last error", "KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO", "self._kd = kd def reset(self): \"\"\"Resets the PID error terms", "self._lastCmdTime = currentTime # Get command return self.getCmdStep(setPoint, procVar, timeStep)", "OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES", "restriction, including without limitation the rights # to use, copy,", "getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the PID command with automatic time", "= 1., ki = 0., kd = 0.): self._kp =", "1., ki = 0., kd = 0.): \"\"\"Sets the proportional,", "IN THE # SOFTWARE. 
# This module defines a simple", "range _cmdMin to _cmdMax.\"\"\" if self._boundRange: if cmd < self._cmdMin:", "and cmdMax will be set to cmdMin or cmdMax respectively.\"\"\"", "kd = 0.): \"\"\"Sets the proportional, integral and derivative terms.\"\"\"", "including without limitation the rights # to use, copy, modify,", "error + self._ki * self._errorIntegral + \\ self._kd * errorDerivative", "copyright notice and this permission notice shall be included in", "Copyright (c) 2017 <NAME> <<EMAIL>> # # Permission is hereby", "self._kp = kp self._ki = ki self._kd = kd self._cmdMin", "kd self._cmdMin = None self._cmdMax = None self._boundRange = False", "command with a specified time step. setPoint is the desired", "ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED", "0. self._lastCmdTime = time.time() # Private methods def _boundCmd(self, cmd):", "free of charge, to any person obtaining a copy #", "1., ki = 0., kd = 0.): self._kp = kp", "files (the \"Software\"), to deal # in the Software without", "is the desired process set point, procVar is the current", "self._errorPrevious) / timeStep # Set last error to current error", "0.): \"\"\"Sets the proportional, integral and derivative terms.\"\"\" self._kp =", "be found at # https://github.com/DrGFreeman/TimedPID. Refer to this repository for", "call to the method.\"\"\" # Calculate time step currentTime =", "error to current error self._errorPrevious = error # Calculate command", "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,", "controlled, The time step is calculated as the time since", "of the Software. # # THE SOFTWARE IS PROVIDED \"AS", "with different time step calculation methods. This is a python", "the command within the range _cmdMin to _cmdMax.\"\"\" if self._boundRange:", "command range. Commands calculated outside the cmdMin and cmdMax will", "the maximum command range. Commands calculated outside the cmdMin and", "a specified time step. setPoint is the desired process set", "self._errorPrevious # Set last error to current error self._errorPrevious =", "\"\"\"Sets the maximum command range. Commands calculated outside the cmdMin", "2017 <NAME> <<EMAIL>> # # Permission is hereby granted, free", "command return self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the maximum", "# Get command return self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self, setPoint,", "(c) 2017 <NAME> <<EMAIL>> # # Permission is hereby granted,", "procVar, timeStep): \"\"\"Gets the PID command with a specified time", "be controlled, timeStep is the time step.\"\"\" # Calculate error", "kd def reset(self): \"\"\"Resets the PID error terms and timer.\"\"\"", "of this software and associated documentation files (the \"Software\"), to", "OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR", "OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE", "to this repository for detailed # documentation. import time class", "set to cmdMin or cmdMax respectively.\"\"\" self._cmdMin = cmdMin self._cmdMax", "EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE", "Proportional - Integral - Derivative (PID) # controller with different", "def reset(self): \"\"\"Resets the PID error terms and timer.\"\"\" self._errorIntegral", "time step. setPoint is the desired process set point, procVar", "USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# This", "(assumed = 1).\"\"\" # Calculate error terms error = setPoint", "setPoint, procVar): \"\"\"Gets the PID command with automatic time step", "# # MIT License # # Copyright (c) 2017 <NAME>", "None self._cmdMax = None self._boundRange = False self._errorIntegral = 0.", "# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,", "__init__(self, kp = 1., ki = 0., kd = 0.):", "kp self._ki = ki self._kd = kd def reset(self): \"\"\"Resets", "PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS", "error = setPoint - procVar self._errorIntegral += (error + self._errorPrevious)", "_cmdMin to _cmdMax.\"\"\" if self._boundRange: if cmd < self._cmdMin: cmd", "errorDerivative # Return bound command return self._boundCmd(cmd) def setCmdRange(self, cmdMin,", "(the \"Software\"), to deal # in the Software without restriction,", "self._cmdMax = cmdMax self._boundRange = True def setGains(self, kp =", "the PID error terms and timer.\"\"\" self._errorIntegral = 0. self._errorPrevious", "is a python # implementation of my Arduino TimedPID library", "kd = 0.): self._kp = kp self._ki = ki self._kd", "WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND", "errorDerivative # Return bound command return self._boundCmd(cmd) def getCmdAutoStep(self, setPoint,", "charge, to any person obtaining a copy # of this", "permit persons to whom the Software is # furnished to", "to current time self._lastCmdTime = currentTime # Get command return", "License # # Copyright (c) 2017 <NAME> <<EMAIL>> # #", "current error self._errorPrevious = error # Calculate command cmd =", "THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE", "the Software is # furnished to do so, subject to", "the current value of the process variable to be controlled.", "above copyright notice and this permission notice shall be included", "IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED,", "self._errorIntegral + \\ self._kd * errorDerivative # Return bound command", "A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE", "limitation the rights # to use, copy, modify, merge, publish,", "error - self._errorPrevious # Set last error to current error", "last call to the method.\"\"\" # Calculate time step currentTime", "# Set last time method was called to current time", "is used (assumed = 1).\"\"\" # Calculate error terms error", "THE # SOFTWARE. # This module defines a simple Proportional", "errorDerivative = error - self._errorPrevious # Set last error to", "# https://github.com/DrGFreeman/TimedPID. Refer to this repository for detailed # documentation.", "PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE #", "range. Commands calculated outside the cmdMin and cmdMax will be", "# SOFTWARE. # This module defines a simple Proportional -", "terms.\"\"\" self._kp = kp self._ki = ki self._kd = kd", "without limitation the rights # to use, copy, modify, merge,", "# Return bound command return self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax):", "to be controlled, The time step is calculated as the", "def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the maximum command range. Commands", "controller with different time step calculation methods. This is a", "def setGains(self, kp = 1., ki = 0., kd =", "Commands calculated outside the cmdMin and cmdMax will be set", "# copies or substantial portions of the Software. # #", "EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE", "PID command with a specified time step. 
setPoint is the", "setPoint - procVar self._errorIntegral += (error + self._errorPrevious) / 2", "ki = 0., kd = 0.): \"\"\"Sets the proportional, integral", "# in the Software without restriction, including without limitation the", "self._errorPrevious = error # Calculate command cmd = self._kp *", "documentation files (the \"Software\"), to deal # in the Software", "the PID command without time step. setPoint is the desired", "process variable to be controlled, The time step is calculated", "copies or substantial portions of the Software. # # THE", "current time self._lastCmdTime = currentTime # Get command return self.getCmdStep(setPoint,", "timeStep): \"\"\"Gets the PID command with a specified time step.", "= 0.): \"\"\"Sets the proportional, integral and derivative terms.\"\"\" self._kp", "step. setPoint is the desired process set point, procVar is", "https://github.com/DrGFreeman/PyTools # # MIT License # # Copyright (c) 2017", "timeStep errorDerivative = (error - self._errorPrevious) / timeStep # Set", "method.\"\"\" # Calculate time step currentTime = time.time() timeStep =", "This module defines a simple Proportional - Integral - Derivative", "of my Arduino TimedPID library which can be found at", "setPoint, procVar, timeStep): \"\"\"Gets the PID command with a specified", "ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT", "= ki self._kd = kd self._cmdMin = None self._cmdMax =", "Set last time method was called to current time self._lastCmdTime", "sell # copies of the Software, and to permit persons", "self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime = time.time() #", "OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT", "the current value of the process variable to be controlled,", "command with automatic time step calculation. setPoint is the desired", "self._errorPrevious) / 2 * timeStep errorDerivative = (error - self._errorPrevious)", "DEALINGS IN THE # SOFTWARE. # This module defines a", "to _cmdMax.\"\"\" if self._boundRange: if cmd < self._cmdMin: cmd =", "OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT,", "publish, distribute, sublicense, and/or sell # copies of the Software,", "automatic time step calculation. setPoint is the desired process set", "to the following conditions: # # The above copyright notice", "calculation methods. This is a python # implementation of my", "self._boundCmd(cmd) def getCmdAutoStep(self, setPoint, procVar): \"\"\"Gets the PID command with", "error terms and timer.\"\"\" self._errorIntegral = 0. self._errorPrevious = 0.", "and this permission notice shall be included in all #", "* self._errorIntegral + \\ self._kd * errorDerivative # Return bound", "Source: https://github.com/DrGFreeman/PyTools # # MIT License # # Copyright (c)", "\"\"\"Gets the PID command without time step. setPoint is the", "modify, merge, publish, distribute, sublicense, and/or sell # copies of", "variable to be controlled, The time step is calculated as", "OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION", "procVar self._errorIntegral += (error + self._errorPrevious) / 2 * timeStep", "self._errorIntegral += (error + self._errorPrevious) / 2 * timeStep errorDerivative", "IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,", "to current error self._errorPrevious = error # Calculate command cmd", "which can be found at # https://github.com/DrGFreeman/TimedPID. 
Refer to this", "Software, and to permit persons to whom the Software is", "= error # Calculate command cmd = self._kp * error", "# to use, copy, modify, merge, publish, distribute, sublicense, and/or", "OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT", "cmd = self._kp * error + self._ki * self._errorIntegral +", "= time.time() timeStep = currentTime - self._lastCmdTime # Set last", "# Constructor def __init__(self, kp = 1., ki = 0.,", "getCmd(self, setPoint, procVar): \"\"\"Gets the PID command without time step.", "# Set last error to current error self._errorPrevious = error", "the process variable to be controlled, timeStep is the time", "Get command return self.getCmdStep(setPoint, procVar, timeStep) def getCmdStep(self, setPoint, procVar,", "\"Software\"), to deal # in the Software without restriction, including", "\"\"\"Gets the PID command with automatic time step calculation. setPoint", "OTHER DEALINGS IN THE # SOFTWARE. # This module defines", "(error + self._errorPrevious) / 2 * timeStep errorDerivative = (error", "will be set to cmdMin or cmdMax respectively.\"\"\" self._cmdMin =", "= currentTime # Get command return self.getCmdStep(setPoint, procVar, timeStep) def", "# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR", "step.\"\"\" # Calculate error terms error = setPoint - procVar", "variable to be controlled. No time step is used (assumed", "time step is used (assumed = 1).\"\"\" # Calculate error", "Private methods def _boundCmd(self, cmd): \"\"\"Bounds the command within the", "COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER", "step is calculated as the time since the last call", "+= (error + self._errorPrevious) / 2 * timeStep errorDerivative =", "current value of the process variable to be controlled. No", "called to current time self._lastCmdTime = currentTime # Get command", "bound command return self._boundCmd(cmd) def setCmdRange(self, cmdMin, cmdMax): \"\"\"Sets the", "and derivative terms.\"\"\" self._kp = kp self._ki = ki self._kd", "# copies of the Software, and to permit persons to", "cmdMin and cmdMax will be set to cmdMin or cmdMax", "time step calculation methods. This is a python # implementation", "currentTime = time.time() timeStep = currentTime - self._lastCmdTime # Set", "cmdMax will be set to cmdMin or cmdMax respectively.\"\"\" self._cmdMin", "- self._errorPrevious # Set last error to current error self._errorPrevious", "setPoint - procVar self._errorIntegral += error errorDerivative = error -", "+ self._errorPrevious) / 2 * timeStep errorDerivative = (error -", "step is used (assumed = 1).\"\"\" # Calculate error terms", "granted, free of charge, to any person obtaining a copy", "ki self._kd = kd def reset(self): \"\"\"Resets the PID error", "obtaining a copy # of this software and associated documentation", "time.time() timeStep = currentTime - self._lastCmdTime # Set last time", "TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN", "is # furnished to do so, subject to the following", "timeStep) def getCmdStep(self, setPoint, procVar, timeStep): \"\"\"Gets the PID command", "to whom the Software is # furnished to do so,", "copy # of this software and associated documentation files (the", "within the range _cmdMin to _cmdMax.\"\"\" if self._boundRange: if cmd", "THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY", "OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE.", "Permission is hereby granted, free of charge, to any person", "process variable to be controlled. 
No time step is used", "# implementation of my Arduino TimedPID library which can be", "2 * timeStep errorDerivative = (error - self._errorPrevious) / timeStep", "and timer.\"\"\" self._errorIntegral = 0. self._errorPrevious = 0. self._lastCmdTime =", "The above copyright notice and this permission notice shall be", "to be controlled, timeStep is the time step.\"\"\" # Calculate", "self._ki * self._errorIntegral + \\ self._kd * errorDerivative # Return", "= kp self._ki = ki self._kd = kd def reset(self):", "be controlled, The time step is calculated as the time", "of the process variable to be controlled. No time step", "= 0., kd = 0.): \"\"\"Sets the proportional, integral and", "the PID command with automatic time step calculation. setPoint is", "Derivative (PID) # controller with different time step calculation methods.", "<<EMAIL>> # # Permission is hereby granted, free of charge,", "Refer to this repository for detailed # documentation. import time", "WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT", "+ self._ki * self._errorIntegral + \\ self._kd * errorDerivative #", "if self._boundRange: if cmd < self._cmdMin: cmd = self._cmdMin elif", "# Private methods def _boundCmd(self, cmd): \"\"\"Bounds the command within", "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES", "the last call to the method.\"\"\" # Calculate time step", "integral and derivative terms.\"\"\" self._kp = kp self._ki = ki", "current value of the process variable to be controlled, The", "procVar): \"\"\"Gets the PID command with automatic time step calculation.", "to permit persons to whom the Software is # furnished", "WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING" ]
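# --- Usage sketch (added; the toy plant below is an assumption made for
# illustration, not part of the library) --------------------------------------
if __name__ == '__main__':
    pid = TimedPID(kp=2., ki=0.5, kd=0.1)
    pid.setCmdRange(-10., 10.)

    procVar = 0.
    for _ in range(100):
        # Fixed 0.1 s time step; the command drives a simple integrator plant.
        cmd = pid.getCmdStep(1., procVar, 0.1)
        procVar += cmd * 0.1
    print(round(procVar, 3))  # should settle near the set point of 1.0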
[ "= self.socket.recv() _msg = msg.decode('utf-8') return json.loads(_msg) @staticmethod def _make_slack_payload(message):", "and executes some simple # operations. # # (c) ISC", "msg.decode('utf-8') return json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload = dict() slack_payload['text']", "except Exception as x: self.log.error(str(x)) go_on = True if message['msg']", "self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket is not None:", "to slack\") url = self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers =", "bind(self): self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context", "message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception as x: self.log.error(str(x)) go_on", "the message record to be send to slack :return: None", "json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload = dict() slack_payload['text'] = message['msg']", "attachment[\"fallback\"] = message['msg'] attachment['text'] = message['msg'] attachment['title'] = message['msg.type'] attachment['author_name']", "# operations. # # (c) ISC Clemenz & Weinbrecht GmbH", "= \"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def", "msg = self.socket.recv() _msg = msg.decode('utf-8') return json.loads(_msg) @staticmethod def", "message['msg'] attachments = list() slack_payload['attachments'] = attachments attachment = dict()", "attachments attachment = dict() attachment[\"fallback\"] = message['msg'] attachment['text'] = message['msg']", "message to Slack Web-Hook. :param message: the message record to", "to Slack Web-Hook. :param message: the message record to be", "x: self.log.error(str(x)) def respond(self): go_on = True while go_on: message", "self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code)) except Exception as x: self.log.error(str(x))", "ZmqResponder(object): context = None socket = None def __init__(self): \"\"\"", "import zmq import pmon class ZmqResponder(object): context = None socket", "Constructor. 
\"\"\" self.cfg = pmon.CFG self.log = pmon.LOG def __enter__(self):", "not None: self.context.term() def _read_message(self): self.log.debug(\"Wait for incoming message\") msg", "slack\") url = self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers = {'Accept':", "self.socket is not None: self.socket.close() if self.context is not None:", "json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length':", "if self.context is not None: self.context.term() def _read_message(self): self.log.debug(\"Wait for", "to be send to slack :return: None \"\"\" self.log.debug(\"Forwarding message", "slack_payload = dict() slack_payload['text'] = message['msg'] attachments = list() slack_payload['attachments']", "attachment['title'] = message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment) return slack_payload def", "= message['msg'] attachment['text'] = message['msg'] attachment['title'] = message['msg.type'] attachment['author_name'] =", "attachment = dict() attachment[\"fallback\"] = message['msg'] attachment['text'] = message['msg'] attachment['title']", "is not None: self.socket.close() if self.context is not None: self.context.term()", "to slack: {0}\".format(rsp.status_code)) except Exception as x: self.log.error(str(x)) def respond(self):", "as x: self.log.error(str(x)) go_on = True if message['msg'] != 'stop'", "= self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json', 'Content-Type':", "dict() attachment[\"fallback\"] = message['msg'] attachment['text'] = message['msg'] attachment['title'] = message['msg.type']", "utf-8-*- # receives messages via zmq and executes some simple", "return json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload = dict() slack_payload['text'] =", "# # (c) ISC Clemenz & Weinbrecht GmbH 2018 #", "slack_payload def _report_message_to_slack(self, message): \"\"\" Send a message to Slack", "class ZmqResponder(object): context = None socket = None def __init__(self):", "def bind(self): self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port)", "is not None: self.context.term() def _read_message(self): self.log.debug(\"Wait for incoming message\")", "bind_str = \"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str)", "def respond(self): go_on = True while go_on: message = self._read_message()", "= json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8',", "'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try: rsp = requests.post(url, data=payload,", "= attachments attachment = dict() attachment[\"fallback\"] = message['msg'] attachment['text'] =", "as x: self.log.error(str(x)) def respond(self): go_on = True while go_on:", "self.cfg = pmon.CFG self.log = pmon.LOG def __enter__(self): self.bind() return", "except Exception as x: self.log.error(str(x)) def respond(self): go_on = True", "message to slack\") url = self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers", ":param message: the message record to be send to slack", "to slack :return: None \"\"\" self.log.debug(\"Forwarding message to slack\") url", 
"'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try: rsp = requests.post(url,", "= message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment) return slack_payload def _report_message_to_slack(self,", "self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception as", "headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))}", "Clemenz & Weinbrecht GmbH 2018 # import json import requests", "= {'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try:", "self._report_message_to_slack(message) except Exception as x: self.log.error(str(x)) go_on = True if", "self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context =", "ZMQ\") if self.socket is not None: self.socket.close() if self.context is", "self.socket.recv() _msg = msg.decode('utf-8') return json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload", "__exit__(self, exc_type, exc_val, exc_tb): self.done() def bind(self): self.log.info(\"Binding ZMQ\") port", "exc_tb): self.done() def bind(self): self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str", "try: self._report_message_to_slack(message) except Exception as x: self.log.error(str(x)) go_on = True", "self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception", "message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment) return slack_payload def _report_message_to_slack(self, message):", "= self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except", "port = self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket", "{'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try: rsp", "= None def __init__(self): \"\"\" Constructor. \"\"\" self.cfg = pmon.CFG", "be send to slack :return: None \"\"\" self.log.debug(\"Forwarding message to", "= pmon.LOG def __enter__(self): self.bind() return self def __exit__(self, exc_type,", "incoming message\") msg = self.socket.recv() _msg = msg.decode('utf-8') return json.loads(_msg)", "self.log = pmon.LOG def __enter__(self): self.bind() return self def __exit__(self,", "def _report_message_to_slack(self, message): \"\"\" Send a message to Slack Web-Hook.", "go_on = True while go_on: message = self._read_message() self.log.debug(\"Message: {0},", "Slack Web-Hook. 
:param message: the message record to be send", "receives messages via zmq and executes some simple # operations.", "done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket is not None: self.socket.close() if", "Exception as x: self.log.error(str(x)) def respond(self): go_on = True while", "self.socket.close() if self.context is not None: self.context.term() def _read_message(self): self.log.debug(\"Wait", "if rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code)) except", "message['msg'] attachment['text'] = message['msg'] attachment['title'] = message['msg.type'] attachment['author_name'] = message['from']", "self.context is not None: self.context.term() def _read_message(self): self.log.debug(\"Wait for incoming", "operations. # # (c) ISC Clemenz & Weinbrecht GmbH 2018", "_read_message(self): self.log.debug(\"Wait for incoming message\") msg = self.socket.recv() _msg =", "for incoming message\") msg = self.socket.recv() _msg = msg.decode('utf-8') return", "self def __exit__(self, exc_type, exc_val, exc_tb): self.done() def bind(self): self.log.info(\"Binding", "zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\") if", "not None: self.socket.close() if self.context is not None: self.context.term() def", "= dict() slack_payload['text'] = message['msg'] attachments = list() slack_payload['attachments'] =", "self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception as x: self.log.error(str(x)) go_on =", "headers=headers) if rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code))", "None: self.socket.close() if self.context is not None: self.context.term() def _read_message(self):", "\"\"\" Send a message to Slack Web-Hook. :param message: the", "(c) ISC Clemenz & Weinbrecht GmbH 2018 # import json", "None def __init__(self): \"\"\" Constructor. \"\"\" self.cfg = pmon.CFG self.log", "attachment['text'] = message['msg'] attachment['title'] = message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment)", "\"\"\" self.log.debug(\"Forwarding message to slack\") url = self.cfg['pmon']['slack.hook'] payload =", "= message['from'] attachments.append(attachment) return slack_payload def _report_message_to_slack(self, message): \"\"\" Send", "= True while go_on: message = self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'],", "message record to be send to slack :return: None \"\"\"", "= requests.post(url, data=payload, headers=headers) if rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending", "\"\"\" self.cfg = pmon.CFG self.log = pmon.LOG def __enter__(self): self.bind()", "attachments = list() slack_payload['attachments'] = attachments attachment = dict() attachment[\"fallback\"]", "rsp = requests.post(url, data=payload, headers=headers) if rsp.status_code != requests.codes.ok: self.log.warn(\"problem", "'utf8', 'Content-Length': str(len(payload))} try: rsp = requests.post(url, data=payload, headers=headers) if", "via zmq and executes some simple # operations. # #", "_report_message_to_slack(self, message): \"\"\" Send a message to Slack Web-Hook. 
:param", "zmq import pmon class ZmqResponder(object): context = None socket =", "{1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception as x: self.log.error(str(x))", "def _read_message(self): self.log.debug(\"Wait for incoming message\") msg = self.socket.recv() _msg", "return self def __exit__(self, exc_type, exc_val, exc_tb): self.done() def bind(self):", "-*- coding: utf-8-*- # receives messages via zmq and executes", "import requests import zmq import pmon class ZmqResponder(object): context =", "# receives messages via zmq and executes some simple #", "'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try: rsp = requests.post(url, data=payload, headers=headers)", "x: self.log.error(str(x)) go_on = True if message['msg'] != 'stop' else", "self.bind() return self def __exit__(self, exc_type, exc_val, exc_tb): self.done() def", "= dict() attachment[\"fallback\"] = message['msg'] attachment['text'] = message['msg'] attachment['title'] =", "self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket = self.context.socket(zmq.REP)", "None \"\"\" self.log.debug(\"Forwarding message to slack\") url = self.cfg['pmon']['slack.hook'] payload", "Weinbrecht GmbH 2018 # import json import requests import zmq", "def __init__(self): \"\"\" Constructor. \"\"\" self.cfg = pmon.CFG self.log =", "def done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket is not None: self.socket.close()", "def __exit__(self, exc_type, exc_val, exc_tb): self.done() def bind(self): self.log.info(\"Binding ZMQ\")", "data=payload, headers=headers) if rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending to slack:", "import pmon class ZmqResponder(object): context = None socket = None", "message): \"\"\" Send a message to Slack Web-Hook. :param message:", ":return: None \"\"\" self.log.debug(\"Forwarding message to slack\") url = self.cfg['pmon']['slack.hook']", "Web-Hook. :param message: the message record to be send to", "def __enter__(self): self.bind() return self def __exit__(self, exc_type, exc_val, exc_tb):", "= pmon.CFG self.log = pmon.LOG def __enter__(self): self.bind() return self", "def _make_slack_payload(message): slack_payload = dict() slack_payload['text'] = message['msg'] attachments =", "slack: {0}\".format(rsp.status_code)) except Exception as x: self.log.error(str(x)) def respond(self): go_on", "None: self.context.term() def _read_message(self): self.log.debug(\"Wait for incoming message\") msg =", "= zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\")", "messages via zmq and executes some simple # operations. #", "requests.codes.ok: self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code)) except Exception as x:", "ISC Clemenz & Weinbrecht GmbH 2018 # import json import", "pmon class ZmqResponder(object): context = None socket = None def", "{0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message) except Exception as x:", "# # -*- coding: utf-8-*- # receives messages via zmq", "pmon.CFG self.log = pmon.LOG def __enter__(self): self.bind() return self def", "self.done() def bind(self): self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str =", "a message to Slack Web-Hook. 
:param message: the message record", "record to be send to slack :return: None \"\"\" self.log.debug(\"Forwarding", "self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket is not", "& Weinbrecht GmbH 2018 # import json import requests import", "self.log.info(\"Disconnecting ZMQ\") if self.socket is not None: self.socket.close() if self.context", "= list() slack_payload['attachments'] = attachments attachment = dict() attachment[\"fallback\"] =", "@staticmethod def _make_slack_payload(message): slack_payload = dict() slack_payload['text'] = message['msg'] attachments", "message\") msg = self.socket.recv() _msg = msg.decode('utf-8') return json.loads(_msg) @staticmethod", "self.log.debug(\"Wait for incoming message\") msg = self.socket.recv() _msg = msg.decode('utf-8')", "message = self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try: self._report_message_to_slack(message)", "requests.post(url, data=payload, headers=headers) if rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending to", "executes some simple # operations. # # (c) ISC Clemenz", "ZMQ\") port = self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context = zmq.Context(1)", "Exception as x: self.log.error(str(x)) go_on = True if message['msg'] !=", "json import requests import zmq import pmon class ZmqResponder(object): context", "self.log.error(str(x)) go_on = True if message['msg'] != 'stop' else False", "self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket", "simple # operations. # # (c) ISC Clemenz & Weinbrecht", "= message['msg'] attachment['title'] = message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment) return", "slack :return: None \"\"\" self.log.debug(\"Forwarding message to slack\") url =", "socket = None def __init__(self): \"\"\" Constructor. \"\"\" self.cfg =", "'application/json', 'Content-Type': 'application/json', 'Content-Encoding': 'utf8', 'Content-Length': str(len(payload))} try: rsp =", "= message['msg'] attachments = list() slack_payload['attachments'] = attachments attachment =", "= None socket = None def __init__(self): \"\"\" Constructor. \"\"\"", "attachments.append(attachment) return slack_payload def _report_message_to_slack(self, message): \"\"\" Send a message", "list() slack_payload['attachments'] = attachments attachment = dict() attachment[\"fallback\"] = message['msg']", "go_on: message = self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK') try:", "exc_val, exc_tb): self.done() def bind(self): self.log.info(\"Binding ZMQ\") port = self.cfg['pmon']['zmq.port']", "pmon.LOG def __enter__(self): self.bind() return self def __exit__(self, exc_type, exc_val,", "payload = json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Content-Encoding':", "exc_type, exc_val, exc_tb): self.done() def bind(self): self.log.info(\"Binding ZMQ\") port =", "coding: utf-8-*- # receives messages via zmq and executes some", "__init__(self): \"\"\" Constructor. 
\"\"\" self.cfg = pmon.CFG self.log = pmon.LOG", "'Content-Length': str(len(payload))} try: rsp = requests.post(url, data=payload, headers=headers) if rsp.status_code", "sending to slack: {0}\".format(rsp.status_code)) except Exception as x: self.log.error(str(x)) def", "slack_payload['text'] = message['msg'] attachments = list() slack_payload['attachments'] = attachments attachment", "True while go_on: message = self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg']))", "try: rsp = requests.post(url, data=payload, headers=headers) if rsp.status_code != requests.codes.ok:", "GmbH 2018 # import json import requests import zmq import", "send to slack :return: None \"\"\" self.log.debug(\"Forwarding message to slack\")", "requests import zmq import pmon class ZmqResponder(object): context = None", "self.context.term() def _read_message(self): self.log.debug(\"Wait for incoming message\") msg = self.socket.recv()", "message['msg'] attachment['title'] = message['msg.type'] attachment['author_name'] = message['from'] attachments.append(attachment) return slack_payload", "# (c) ISC Clemenz & Weinbrecht GmbH 2018 # import", "\"\"\" Constructor. \"\"\" self.cfg = pmon.CFG self.log = pmon.LOG def", "if self.socket is not None: self.socket.close() if self.context is not", "rsp.status_code != requests.codes.ok: self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code)) except Exception", "# -*- coding: utf-8-*- # receives messages via zmq and", "# import json import requests import zmq import pmon class", "= msg.decode('utf-8') return json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload = dict()", "import json import requests import zmq import pmon class ZmqResponder(object):", "message: the message record to be send to slack :return:", "\"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self):", "str(len(payload))} try: rsp = requests.post(url, data=payload, headers=headers) if rsp.status_code !=", "= self.cfg['pmon']['zmq.port'] bind_str = \"tcp://*:{0}\".format(port) self.context = zmq.Context(1) self.socket =", "self.context = zmq.Context(1) self.socket = self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting", "attachment['author_name'] = message['from'] attachments.append(attachment) return slack_payload def _report_message_to_slack(self, message): \"\"\"", "respond(self): go_on = True while go_on: message = self._read_message() self.log.debug(\"Message:", "None socket = None def __init__(self): \"\"\" Constructor. \"\"\" self.cfg", "some simple # operations. # # (c) ISC Clemenz &", "self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json', 'Content-Type': 'application/json',", "url = self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message)) headers = {'Accept': 'application/json',", "dict() slack_payload['text'] = message['msg'] attachments = list() slack_payload['attachments'] = attachments", "return slack_payload def _report_message_to_slack(self, message): \"\"\" Send a message to", "self.log.error(str(x)) def respond(self): go_on = True while go_on: message =", "self.log.debug(\"Forwarding message to slack\") url = self.cfg['pmon']['slack.hook'] payload = json.dumps(self._make_slack_payload(message))", "Send a message to Slack Web-Hook. 
:param message: the message", "while go_on: message = self._read_message() self.log.debug(\"Message: {0}, {1}\".format(message['msg.type'], message['msg'])) self.socket.send_string('ACK')", "= self.context.socket(zmq.REP) self.socket.bind(bind_str) def done(self): self.log.info(\"Disconnecting ZMQ\") if self.socket is", "_make_slack_payload(message): slack_payload = dict() slack_payload['text'] = message['msg'] attachments = list()", "{0}\".format(rsp.status_code)) except Exception as x: self.log.error(str(x)) def respond(self): go_on =", "zmq and executes some simple # operations. # # (c)", "_msg = msg.decode('utf-8') return json.loads(_msg) @staticmethod def _make_slack_payload(message): slack_payload =", "message['from'] attachments.append(attachment) return slack_payload def _report_message_to_slack(self, message): \"\"\" Send a", "!= requests.codes.ok: self.log.warn(\"problem sending to slack: {0}\".format(rsp.status_code)) except Exception as", "slack_payload['attachments'] = attachments attachment = dict() attachment[\"fallback\"] = message['msg'] attachment['text']", "context = None socket = None def __init__(self): \"\"\" Constructor.", "2018 # import json import requests import zmq import pmon", "__enter__(self): self.bind() return self def __exit__(self, exc_type, exc_val, exc_tb): self.done()" ]
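The responder above speaks a small JSON protocol: each request carries 'msg', 'msg.type', and 'from' keys, the reply is always the string 'ACK', and a payload whose 'msg' equals 'stop' ends the respond() loop. Below is a minimal sketch of a matching sender; the function name send_message and the default sender label are illustrative and not part of pmon, and the port is assumed to be the same 'zmq.port' config value the responder binds to.

import json

import zmq


def send_message(port, msg_type, msg, sender="pmon-client"):
    # REQ pairs with the responder's REP socket.
    context = zmq.Context(1)
    socket = context.socket(zmq.REQ)
    socket.connect("tcp://localhost:{0}".format(port))
    try:
        payload = {'msg.type': msg_type, 'msg': msg, 'from': sender}
        socket.send_string(json.dumps(payload))
        # the responder always answers 'ACK'
        return socket.recv().decode('utf-8')
    finally:
        socket.close()
        context.term()


# send_message(5555, 'status', 'disk almost full')
# send_message(5555, 'control', 'stop')  # ends the respond() loop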
<filename>test/test_substitute.py
from padmini import operations as op


def test_yatha():
    before = ("tAs", "Tas", "Ta", "mip")
    after = ("tAm", "tam", "ta", "am")
    for i, b in enumerate(before):
        assert op.yatha(b, before, after) == after[i]


"""
def test_ti():
    assert S.ti("ta", "e") == "te"
    assert S.ti("AtAm", "e") == "Ate"


def test_antya():
    assert S.antya("ti", "u") == "tu"
    assert S.antya("te", "Am") == "tAm"
"""
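The live test pins down the contract of op.yatha: given a term and two parallel tuples, it returns the element of after at the same index the term occupies in before (a one-to-one, order-preserving correspondence). A minimal sketch of that behavior, derived only from the test above; padmini's real operations.yatha may differ in signature and edge-case handling:

def yatha(term, before, after):
    # map a term to the same-index element of the parallel tuple
    return after[before.index(term)]


assert yatha("Tas", ("tAs", "Tas", "Ta", "mip"), ("tAm", "tam", "ta", "am")) == "tam"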
# coding: utf-8
# just prints the emails of members of a group to stdout,
# both primary and secondary members
# run as
# $python extractemails_nogui.py "Tidal Disruption Events"

from __future__ import print_function

__author__ = '<NAME>, NYU - GitHub: fedhere'

import sys
import pandas as pd
from argparse import ArgumentParser
from config import tvsfile


def parse_args(subglist):
    """ Use ArgParser to build up the arguments we will use in our script """
    stored_args = {}
    # get the script name without the extension & use it to build up
    # the json filename
    parser = ArgumentParser(description='Selecting members by subgroup')
    parser.add_argument('subgroup',
                        action='store',
                        default=None,
                        help='Choose the subgroup affiliation:' +
                             ' -- '.join([s for s in subglist]))
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    if tvsfile is None:
        print ("Required Argument: Google Doc file identifier (if you do not have it email federica!)")
        sys.exit()

    TVSMembers = pd.read_csv('https://docs.google.com/spreadsheets/d/' + tvsfile +
                             '/export?gid=0&format=csv', index_col=0)
    subgroups = TVSMembers.primary.unique()
    conf = parse_args([x for x in subgroups if str(x) != 'nan'])

    primary = conf.subgroup
    secondary = conf.subgroup

    emails = TVSMembers[TVSMembers.primary == primary]['email'].values
    print ("These are the members with primary affiliation with " + primary)
    print ("")
    print (' '.join([em + ',' for em in emails]))

    emails = TVSMembers[(TVSMembers.secondary == secondary) |
                        (TVSMembers['secondary.1'] == secondary) |
                        (TVSMembers['secondary.2'] == secondary)]['email'].values
    print ("\n")
    print ("These are the members with secondary affiliation with " + secondary)
    print ("")
    print (' '.join([em + ',' for em in emails]))
    print ("")
    print ("If you also want their names and affiliations use: ")
    print ("$python extractemailsW.py " + conf.subgroup)
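The secondary-affiliation filter above ORs three hard-coded columns (secondary, secondary.1, secondary.2), which silently misses matches if the spreadsheet ever grows a secondary.3 column. A sketch of a column-count-agnostic equivalent, assuming the same TVSMembers frame:

# select every column whose name contains 'secondary', then keep rows
# where any of them equals the requested subgroup
secondary_cols = TVSMembers.filter(like='secondary')
mask = secondary_cols.eq(secondary).any(axis=1)
emails = TVSMembers[mask]['email'].values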
import functools
import importlib
import os
import random
import textwrap
import traceback

import aioimgur
import discord
import mystbin
import tweepy
from discord.ext import commands, menus
from discord.ext.menus.views import ViewMenuPages

import utils


class Owner(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command(brief="a command to send mail")
    async def mail(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.reply("User not found, returning Letter")
            user = ctx.author
        if user:
            await ctx.reply("Please give me a message to use.")
            message = await self.bot.wait_for("message", check=utils.check(ctx))
            embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215))
            embed_message.set_author(name=f"Mail from: {ctx.author}", icon_url=(ctx.author.display_avatar.url))
            embed_message.set_footer(text=f"{ctx.author.id}")
            embed_message.set_thumbnail(url="https://i.imgur.com/1XvDnqC.png")
            if (user.dm_channel is None):
                await user.create_dm()
            try:
                await user.send(embed=embed_message)
            except:
                user = ctx.author
                await user.send(content="Message failed. sending", embed=embed_message)
            embed_message.add_field(name="Sent To:", value=str(user))
            await self.bot.get_channel(855217084710912050).send(embed=embed_message)

    @commands.command()
    async def load(self, ctx, *, cog=None):
        if cog:
            try:
                self.bot.load_extension(cog)
            except Exception as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Loaded cog(see if there's any errors)")
        if cog is None:
            await ctx.send("you can't ask to load no cogs.")

    @commands.command()
    async def reload(self, ctx, *, cog=None):
        cog = cog or "all"
        if cog == "all":
            for x in list(self.bot.extensions):
                try:
                    self.bot.reload_extension(x)
                except commands.errors.ExtensionError as e:
                    await ctx.send(e)
                    traceback.print_exc()
            await ctx.send("done reloading all cogs(check for any errors)")
        else:
            try:
                self.bot.reload_extension(cog)
            except commands.errors.ExtensionError as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Cog reloaded :D (check for any errors)")

    @commands.command()
    async def unload(self, ctx, *, cog=None):
        if cog:
            try:
                self.bot.unload_extension(cog)
            except commands.errors.ExtensionError as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Cog should be unloaded just fine :D.(check any errors)")
        if cog is None:
            await ctx.send("you can't ask to reload no cogs")

    @commands.command()
    async def shutdown(self, ctx):
        await ctx.send("shutdown/logout")
        ...

    # need to fix all cog_command_error
    @commands.command(brief="Changes Bot Status(Owner Only)")
    async def status(self, ctx, *, args=None):
        if await self.bot.is_owner(ctx.author):
            if args:
                await self.bot.change_presence(status=discord.Status.do_not_disturb, activity=discord.Activity(type=discord.ActivityType.watching, name=args))
            if args is None:
                await self.bot.change_presence(status=discord.Status.do_not_disturb)
        if await self.bot.is_owner(ctx.author) is False:
            await ctx.send("That's an owner only command")

    @commands.command(brief="Only owner command to change bot's nickname")
    async def change_nick(self, ctx, *, name=None):
        ...

    @commands.command(aliases=["bypass_command"])
    async def command_bypass(self, ctx, user: utils.BetterUserconverter = None, *, command=None):
        # make sure to swap to autoconverter if it gets added.
        user = user or ctx.author
        if command:
            command_wanted = self.bot.get_command(command)
            if command_wanted:
                await ctx.send(f"{command_wanted.name} now accessible for the {user} for one command usage!")
                self.bot.special_access[user.id] = command_wanted.name
            if command_wanted is None:
                await ctx.send("Please specify a valid command.")
        if command is None:
            await ctx.send("You sadly can't use it like that.")

    @commands.command(brief="only works with JDJG, but this command is meant to send updates to the webhook")
    async def webhook_update(self, ctx, *, args=None):
        if await self.bot.is_owner(ctx.author):
            if args:
                if isinstance(ctx.channel, discord.TextChannel):
                    try:
                        await ctx.message.delete()
                    except:
                        await ctx.send("It couldn't delete the message in this guild so, I kept it here.")
                webhook = discord.Webhook.from_url(os.environ["webhook1"], session=self.bot.session)
                embed = discord.Embed(title="Update", color=(35056), timestamp=(ctx.message.created_at))
                embed.add_field(name="Update Info:", value=args)
                embed.set_author(name="<NAME>", icon_url='https://i.imgur.com/pdQkCBv.png')
                await webhook.send(embed=embed)
                webhook = discord.Webhook.from_url(os.environ["webhook99"], session=self.bot.session)
                embed = discord.Embed(title="Update", color=(35056), timestamp=(ctx.message.created_at))
                embed.add_field(name="Update Info:", value=args)
                embed.set_author(name="<NAME>", icon_url='https://i.imgur.com/pdQkCBv.png')
                await webhook.send(embed=embed)
        ...

    @commands.command(brief="A command to add sus_users with a reason")
    async def addsus(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("can't have the user be none.")
        if user:
            await ctx.reply("Please give me a reason why:")
            reason = await self.bot.wait_for("message", check=utils.check(ctx))
            cur = await self.bot.sus_users.cursor()
            await cur.execute("INSERT INTO sus_users VALUES (?, ?)", (user.id, reason.content))
            await self.bot.sus_users.commit()
            await cur.close()
            await ctx.send("added sus users, successfully")

    @commands.command(brief="a command to remove sus users.")
    async def removesus(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("You can't remove no one.")
        if user:
            cur = await self.bot.sus_users.cursor()
            await cur.execute("DELETE FROM sus_users WHERE user_id = ?", (user.id,))
            await self.bot.sus_users.commit()
            await cur.close()
            await ctx.send("Removed sus users.")

    class SusUsersEmbed(menus.ListPageSource):
        async def format_page(self, menu, item):
            embed = discord.Embed(title="JDJG Inc. Official", color=random.randint(0, 16777215))
            embed.add_field(name=f"User ID : {item[0]}", value=f"**Reason :** {item[1]}", inline=False)
            return embed

    @commands.command(brief="a command listed the sus users")
    async def sus_users(self, ctx):
        cur = await self.bot.sus_users.cursor()
        cursor = await cur.execute("SELECT * FROM SUS_USERS;")
        sus_users = tuple(await cursor.fetchall())
        await cur.close()
        await self.bot.sus_users.commit()
        menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1), delete_message_after=True)
        await menu.start(ctx)

    @sus_users.error
    async def sus_users_error(self, ctx, error):
        await ctx.send(error)

    class TestersEmbed(menus.ListPageSource):
        async def format_page(self, menu, item):
            embed = discord.Embed(title="Testing Users:", color=random.randint(0, 16777215))
            embed.add_field(name="User ID:", value=f"{item}", inline=False)
            return embed

    @commands.command(brief="a command that lists the testers of all the commands")
    async def testers(self, ctx):
        menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page=1), delete_message_after=True)
        if (ctx.author.dm_channel is None):
            await ctx.author.create_dm()
        await menu.start(ctx, channel=ctx.author.dm_channel)

    @commands.command(brief="A command to remove testers")
    async def remove_tester(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("can't have the user be none.")
        if user:
            if not user.id in self.bot.testers:
                return await ctx.send(f"{user} isn't in the testers list.")
            else:
                self.bot.testers.remove(user.id)
                await ctx.send(f"Removed tester known as {user}")

    @commands.command(brief="A command to add testers")
    async def add_tester(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("can't have the user be none.")
        if user:
            cur = await self.bot.sus_users.cursor()
            await cur.execute("INSERT INTO testers_list VALUES (?)", (user.id,))
            await self.bot.sus_users.commit()
            await cur.close()
            ...

    def tweepy_post(self, post_text=None):
        consumer_key = os.getenv('tweet_key')
        consumer_secret = os.getenv('tweet_secret')
        auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
        access_token = os.getenv('tweet_access')
        access_secret = os.getenv(...)
        auth.set_access_token(access_token, access_secret)
        twitter_api = tweepy.API(auth)
        return twitter_api.update_status(status=post_text)

    @commands.command(brief="sends tweet to JDBot twitter")
    async def tweet(self, ctx, *, args=None):
        try:
            tweet_time = functools.partial(self.tweepy_post, args)
            post = await self.bot.loop.run_in_executor(None, tweet_time)
        except Exception as e:
            return await ctx.send(f"Exception occurred at {e}")
        await ctx.send(f"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}")

    ...

    @commands.command(brief="a powerful owner tool to reload local files that aren't reloadable.")
    async def reload_basic(self, ctx, *, args=None):
        if args is None:
            await ctx.send("Can't reload module named None")
        if args:
            try:
                module = importlib.import_module(args)
            except Exception as e:
                traceback.print_exc()
                return await ctx.send(e)
            try:
                value = importlib.reload(module)
            except Exception as e:
                traceback.print_exc()
                return await ctx.send(e)
            await ctx.send(f"Successfully reloaded {value.__name__} \nMain Package: {value.__package__}")

    @commands.command(brief="backs up a channel and then sends it into a file or mystbin")
    async def channel_backup(self, ctx):
        messages = await ctx.channel.history(limit=None, oldest_first=True).flatten()
        new_line = "\n"
        page = "\n".join(f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}" if msg.content else f"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}" for msg in messages)
        mystbin_client = mystbin.Client(session=self.bot.session)
        paste = await mystbin_client.post(page)
        await ctx.author.send(content=f"Added text file to mystbin: \n{paste.url}")

    @channel_backup.error
    async def channel_backup_error(self, ctx, error):
        etype = type(error)
        trace = error.__traceback__
        values = "".join(map(str, traceback.format_exception(etype, error, trace)))
        pages = textwrap.wrap(values, width=1992)
        ...
        mystbin_client = mystbin.Client(session=self.bot.session)
        paste = await mystbin_client.post(values)
        await ctx.send(f"Traceback: {paste.url}")

    @commands.command(brief="adds packages and urls to rtfm DB", aliases=["add_rtfm"])
    async def addrtfm(self, ctx, name=None, *, url=None):
        if not name or not url or not name and not url:
            return await ctx.send("You need a name and also url.")
        cur = await self.bot.sus_users.cursor()
        await cur.execute("INSERT INTO RTFM_DICTIONARY VALUES (?, ?)", (name, url))
        await self.bot.sus_users.commit()
        await cur.close()
        await ctx.send(f"added {name} and {url} to the rtfm DB")

    @commands.command(brief="removes packages from the rtfm DB")
    async def removertfm(self, ctx, *, name=None):
        if name is None:
            return await ctx.send("You can't remove None")
        cur = await self.bot.sus_users.cursor()
        await cur.execute("DELETE FROM RTFM_DICTIONARY WHERE name = ?", (name,))
        await self.bot.sus_users.commit()
        await cur.close()
        await ctx.send(f"Removed the rtfm value {name}.")

    @commands.command(brief="a command to save images to imgur(for owner only lol)")
    async def save_image(self, ctx):
        if not ctx.message.attachments:
            return await ctx.send("please provide some attachments.")
        await ctx.send("JDJG doesn't take any responsibility for what you upload here :eyes: don't upload anything bad okay?")
        for x in ctx.message.attachments:
            try:
                discord.utils._get_mime_type_for_image(await x.read())
            except Exception as e:
                traceback.print_exc()
                return await ctx.send(e)
            imgur_client = aioimgur.ImgurClient(os.environ["imgur_id"], os.environ["imgur_secret"])
            imgur_url = await imgur_client.upload(await x.read())
            await ctx.send(f"{imgur_url['link']}")

    @commands.command(brief="chunks a guild for the purpose of testing purpose(it's owner only to be used in testing)")
    async def chunk_guild(self, ctx):
        if ctx.guild.chunked:
            return await ctx.send("No need to chunk this guild, it appears to be chunked")
        await ctx.guild.chunk(cache=True)
        await ctx.send("Finished chunking..")

    @chunk_guild.error
    async def chunk_guild_error(self, ctx, error):
        await ctx.send(error)
        traceback.print_exc()

    @commands.command(brief="displays the guild status and user status of the bot")
    async def stats_status(self, ctx):
        await self.bot.change_presence(activity=discord.Activity(type=discord.ActivityType.watching, name=f"{len(self.bot.guilds)} servers | {len(self.bot.users)} users"))

    @stats_status.error
    async def stats_status_error(self, ctx, error):
        await ctx.send(error)

    @commands.command(brief="a command to give a list of servers(owner only)", help="Gives a list of guilds(Bot Owners only)")
    async def servers(self, ctx):
        if await self.bot.is_owner(ctx.author):
            pag = commands.Paginator()
            for g in self.bot.guilds:
                pag.add_line(f"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}")
            pages = [page.strip("`") for page in pag.pages]
            ...
        if await self.bot.is_owner(ctx.author) is False:
            await ctx.send("You can't use that it's owner only")

    @commands.command(brief="a command to give a list of servers(owner only)", help="Gives a list of guilds(Bot Owners only) but with join dates updated.")
    async def servers2(self, ctx):
        if await self.bot.is_owner(ctx.author):
            sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at)
            pag = commands.Paginator()
            for g in sorted_guilds:
                pag.add_line(f"{discord.utils.format_dt(g.me.joined_at, style='d')} {discord.utils.format_dt(g.me.joined_at, style='T')} \n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\n")
            pages = [page.strip("`") for page in pag.pages]
            ...
        if await self.bot.is_owner(ctx.author) is False:
            await ctx.send("You can't use that it's owner only")


def setup(bot):
    bot.add_cog(Owner(bot))
user = user or ctx.author", "upload anything bad okay?\") for x in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await", "self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if", "ctx): if not ctx.message.attachments: return await ctx.send(\"You need to provide", "async def status(self , ctx , * , args=None): if", "return embed @commands.command(brief=\"a command to give a list of servers(owner", "to remove testers\") async def remove_tester(self, ctx, *, user: utils.BetterUserconverter", "webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ[\"webhook99\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's", "person is in.\") async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter", "ctx): await ctx.send(\"shutdown/logout time happening.\") await self.bot.close() async def cog_check(self,", "return await ctx.send(\"You can't remove None\") cur = await self.bot.sus_users.cursor()", "importlib, mystbin, typing, aioimgur, functools, tweepy import traceback, textwrap from", "is None: await ctx.send(\"can't have a user be none.\") if", "None: await ctx.send(\"You can't have a none user.\") if user:", "msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments", "None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief=\"A command", "ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc() return", "({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}\"", "to reload local files that aren't reloadable.\") async def reload_basic(self,", "ctx.send(\"You can't use that command\") class ServersEmbed(menus.ListPageSource): async def format_page(self,", "= await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO sus_users VALUES (?, ?)\",", "class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title", "(user.id,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"Removed sus users.\") class", "user.\") if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO", "if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send(\"Changing Nickname\") try:", "menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief=\"A command to add sus_users with", "cog_command_error @commands.command(brief=\"Changes Bot Status(Owner Only)\") async def status(self , ctx", "name = None): if name is None: return await ctx.send(\"You", "time happening.\") await self.bot.close() async def cog_check(self, ctx): return await", "False: await ctx.send(\"You can't use that command\") class ServersEmbed(menus.ListPageSource): async", "ctx.send(\"please specify a command\") if not command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That", "self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await 
ctx.send(\"Please specify a valid", "= importlib.import_module(name=args) except Exception as e: traceback.print_exc() return await ctx.send(e)", "def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None): guild =", "list of guilds(Bot Owners only) but with join dates updated.\")", "await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that command\")", "remove None\") cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM RTFM_DICTIONARY", "in testing guilds only)\") async def chunk_guild(self, ctx): if ctx.guild", "use that command\") class ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item):", "consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret)", "url or not name and not url: return await ctx.send(\"You", "value=importlib.reload(module) except Exception as e: traceback.print_exc() return await ctx.send(e) await", "that\") @commands.command(brief=\"Commands to see what guilds a person is in.\")", "if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"That's an owner only", "remove sus users.\") async def removesus(self, ctx, *, user: utils.BetterUserconverter", "embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ[\"webhook99\"], session", "cursor = await cur.execute(\"SELECT * FROM SUS_USERS;\") sus_users = tuple(await", "user is None: await ctx.send(\"You can't have a non existent", "format_page(self, menu, item): embed = discord.Embed(title=\"Servers:\",description=item,color=random.randint(0, 16777215)) return embed @commands.command(brief=\"a", "guilds only)\") async def chunk_guild(self, ctx): if ctx.guild is None:", "def removesus(self, ctx, *, user: utils.BetterUserconverter = None): if user", "is None: await ctx.send(\"you can't ask to load no cogs.\")", "if await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in self.bot.guilds:", "leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None): guild = guild", "= None): if not name or not url or not", "or \"all\" if cog == \"all\": for x in list(self.bot.extensions):", "ctx): await ctx.send(\"changing status, check now....\") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)}", "cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO testers_list VALUES (?)\",", "a valid command.\") if command is None: await ctx.send(\"select a", "= \"\\n\".join(f\"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments", "embed.add_field(name = \"User ID:\", value = f\"{item}\", inline = False)", "cog: try: self.bot.load_extension(cog) except Exception as e: await ctx.send(e) traceback.print_exc()", "return await ctx.send(\"You can't chunk a guild that doesn't exist", "if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e)", "change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel):", "command\") class ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed =", 
"TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title =", "ctx.send(\"You can't use that in Dms.\") if await self.bot.is_owner(ctx.author) is", "[\"reset_cooldown\"]) async def resetcooldown(self, ctx, *, command = None): if", "except Exception as e: traceback.print_exc() return await ctx.send(f\"Exception occured at", "def __init__(self, bot): self.bot = bot @commands.command(brief=\"a command to send", "with JDJG, but this command is meant to send updates", "False: await ctx.send(\"You can't use that\") @commands.command(brief=\"Commands to see what", "reason.content)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"added sus users, succesfully\")", "self.bot.sus_users.commit() await cur.close() await ctx.send(\"added sus users, succesfully\") @commands.command(brief=\"a command", "aioinput_test(self, ctx, *, args = None): args = args or", "def update_sus(self, ctx): await self.bot.sus_users.commit() await ctx.send(\"Updated SQL boss.\") @update_sus.error", "f\"{ctx.author.id}\") embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is None): await user.create_dm()", "if not command_wanted: return await ctx.send(\"please specify a command\") if", "embed_message.add_field(name=\"Sent To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *,", "for you.\",aliases = [\"reset_cooldown\"]) async def resetcooldown(self, ctx, *, command", "user: await ctx.reply(\"Please give me a message to use.\") message", "traceback.print_exc() @commands.command(brief = \"displays the guild status and user status", "self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *, cog = None):", "await ctx.send(\"changing status, check now....\") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers", "def sus_users(self, ctx): cur = await self.bot.sus_users.cursor() cursor = await", "cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE name = ?\", (name,)) await self.bot.sus_users.commit()", "ctx.send(\"added sus users, succesfully\") @commands.command(brief=\"a command to remove sus users.\")", "doesn't have a cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset", "None): args = args or \"Test\" result=await self.bot.loop.run_in_executor(None, input, (f\"{args}:\"))", "can't use that\") @commands.command(brief=\"Commands to see what guilds a person", "only)\",help=\"Gives a list of guilds(Bot Owners only) but with join", "to reload no cogs\") @commands.command() async def shutdown(self, ctx): await", "sus_users(self, ctx): cur = await self.bot.sus_users.cursor() cursor = await cur.execute(\"SELECT", "if args is None:await ctx.send(\"Can't reload module named None\") if", "twitter.\") try: tweet_time = functools.partial(self.tweepy_post, args) post = await self.bot.loop.run_in_executor(None,", "occured at {e}\") await ctx.send(f\"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}\")", "sus users.\") class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title", "discord.Embed(title=\"Servers:\",description=item,color=random.randint(0, 16777215)) return embed @commands.command(brief=\"a command to give a list", "pag = commands.Paginator() for g in 
self.bot.guilds: pag.add_line(f\"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`)", "pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await", "await ctx.send(\"you can't ask to load no cogs.\") @commands.command() async", "a cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset cooldown of", "= await self.bot.wait_for(\"message\",check = utils.check(ctx)) embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0,", "await cur.close() await ctx.send(\"added sus users, succesfully\") @commands.command(brief=\"a command to", "owner only command\") @commands.command(brief=\"Only owner command to change bot's nickname\")", "not user.id in self.bot.testers: return await ctx.send(f\"{user} isn't in the", "can't send nothing to twitter.\") try: tweet_time = functools.partial(self.tweepy_post, args)", "{item[1]}\", inline = False) return embed @commands.command(brief=\"a command to grab", "to JDBot Twitter\") async def send_tweet(self, ctx, *, args =", "@commands.command(brief=\"only works with JDJG, but this command is meant to", "ctx.guild.me.edit(nick=name) except discord.Forbidden: await ctx.send(\"Appears not to have valid perms\")", "menu, item): embed=discord.Embed(title = \"Users Deemed Suspicious by JDJG Inc.", "load(self, ctx, *, cog = None): if cog: try: self.bot.load_extension(cog)", "it like that.\") if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You", "class ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title=\"Servers:\",description=item,color=random.randint(0,", "pages or [\"No shared servers\"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after =", "await menu.start(ctx) @commands.command() async def update_sus(self, ctx): await self.bot.sus_users.commit() await", "ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use", ", discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy import", "else: self.bot.testers.remove(user.id) await ctx.send(f\"Removed tester known as {user}\") @commands.command(brief=\"A command", "= await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM testers_list WHERE user_id =", "def chunk_guild(self, ctx): if ctx.guild is None: return await ctx.send(\"You", "def reload(self, ctx, *, cog = None): cog = cog", "list of servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only)\")", "failed. 
sending\",embed=embed_message) embed_message.add_field(name=\"Sent To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self,", "{value.__package__}\") @commands.command(brief=\"backs up a channel and then sends it into", "(`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\") for page", "self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"done", "perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't use that in Dms.\")", "#I need to fix all cog_command_error @commands.command(brief=\"Changes Bot Status(Owner Only)\")", "async def chunk_guild_error(self, ctx, error): await ctx.send(error) traceback.print_exc() @commands.command(brief =", "await ctx.send(f\"Removed the rfm value {name}.\") @commands.command(brief = \"a command", "{url} to the rtfm DB\") @commands.command(brief = \"removes packages from", "cog == \"all\": for x in list(self.bot.extensions): try: self.bot.reload_extension(x) except", "if user: await ctx.reply(\"Please give me a message to use.\")", "for g in sorted_guilds: pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at, style = 'd')} {discord.utils.format_dt(g.me.joined_at, style", "rtfm DB\", aliases=[\"add_rtfm\"]) async def addrtfm(self, ctx, name = None,", "= textwrap.wrap(values, width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page =", "if cog: try: self.bot.load_extension(cog) except Exception as e: await ctx.send(e)", "= await self.bot.sus_users.cursor() cursor = await cur.execute(\"SELECT * FROM SUS_USERS;\")", "as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"done reloading all cogs(check", "= [\"remove_rtfm\"]) async def removertfm(self, ctx, *, name = None):", "self.bot.sus_users.commit() await cur.close() await ctx.send(f\"added {name} and {url} to the", "None can't do anything.\") await ctx.send(\"Bot leaving guild :(\") try:", "None): if user is None: await ctx.reply(\"User not found, returning", "await ctx.send(\"Loaded cog(see if there's any errors)\") if cog is", "channel = ctx.author.dm_channel) @commands.command(brief=\"A command to add sus_users with a", "return await ctx.send(\"please specificy a command\") command_wanted = self.bot.get_command(command) if", "self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that\") @commands.command(brief=\"Commands to", "= ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if (ctx.author.dm_channel is", "return await ctx.send(e) await ctx.send(f\"Sucessfully reloaded {value.__name__} \\nMain Package: {value.__package__}\")", "servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only) but with", "cooldown for you.\",aliases = [\"reset_cooldown\"]) async def resetcooldown(self, ctx, *,", "can't do anything.\") await ctx.send(\"Bot leaving guild :(\") try: await", "okay?\") for x in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception", "imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url = await imgur_client.upload(await x.read()) await ctx.send(f\"{imgur_url['link']}\")", "to grab all in the sus_users list\") async def sus_users(self,", "await cur.close() await ctx.send(\"Removed sus users.\") class SusUsersEmbed(menus.ListPageSource): async def", "@commands.command(brief=\"a command to remove sus 
users.\") async def removesus(self, ctx,", "except Exception as e: traceback.print_exc() return await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"],", "user: cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO testers_list VALUES", "ctx.send(f\"Result of the input was {result}\") @commands.command(brief=\"a powerful owner tool", "Nickname\") try: await ctx.guild.me.edit(nick=name) except discord.Forbidden: await ctx.send(\"Appears not to", "added. user = user or ctx.author if command: command_wanted=self.bot.get_command(command) if", "await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed", "ctx.send(\"Can't reload module named None\") if args: try: module =", "not user.id in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f\"added tester known as", "ctx, *, args = None): if await self.bot.is_owner(ctx.author): if args:", "None): guild = guild or ctx.guild if guild is None:", "if ctx.command or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need", "in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f\"added tester known as {user}\") else:", "ctx.reply(\"Please give me a reason why:\") reason = await self.bot.wait_for(\"message\",check=", "packages from the rtfm DB\", aliases = [\"remove_rtfm\"]) async def", "is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is False: await", "g in self.bot.guilds: pag.add_line(f\"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\")", "try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc() return await", "and user status immediately\") async def stats_status(self, ctx): await ctx.send(\"changing", "None): if not name or not url or not name", "ctx.send(error) traceback.print_exc() #I need to fix all cog_command_error @commands.command(brief=\"Changes Bot", "ctx.author.dm_channel) @commands.command(brief=\"A command to add sus_users with a reason\") async", "async def testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1),", "need to provide some attachments.\") await ctx.send(\"JDJG doesn't take any", "reloadable.\") async def reload_basic(self, ctx, *, args = None): if", "command to give a list of servers(owner only)\",help=\"Gives a list", "True) await menu.start(ctx) @commands.command() async def update_sus(self, ctx): await self.bot.sus_users.commit()", "= \"Testing Users:\", color = random.randint(0, 16777215)) embed.add_field(name = \"User", "DB\", aliases = [\"remove_rtfm\"]) async def removertfm(self, ctx, *, name", "command = None): if not command: return await ctx.send(\"please specificy", "= \"sends tweet to JDBot Twitter\") async def send_tweet(self, ctx,", "already!\") def tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret", "valid perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't use that in", "Exception as e: traceback.print_exc() return await ctx.send(f\"Exception occured at {e}\")", "return await ctx.send(e) try: value=importlib.reload(module) except Exception as e: traceback.print_exc()", "of sent tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks a guild", "def save_image(self, ctx): if not ctx.message.attachments: return await ctx.send(\"You 
need", "def addsus(self, ctx, *, user: utils.BetterUserconverter = None): if user", "error occured: {e}\") traceback.print_exc() @commands.command() async def aioinput_test(self, ctx, *,", "webhook = discord.Webhook.from_url(os.environ[\"webhook1\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png')", "is None: await ctx.reply(\"User not found, returning Letter\") user =", "responbility for what you upload here :eyes: don't upload anything", "Exception as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Loaded cog(see if", "await ctx.send(f\"Removed tester known as {user}\") @commands.command(brief=\"A command to add", "if await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if", "= ctx.author await user.send(content=\"Message failed. sending\",embed=embed_message) embed_message.add_field(name=\"Sent To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message)", "if user: await ctx.reply(\"Please give me a reason why:\") reason", "x in list(self.bot.extensions): try: self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await", "as e: traceback.print_exc() return await ctx.send(e) try: value=importlib.reload(module) except Exception", "\"sends tweet to JDBot Twitter\") async def send_tweet(self, ctx, *,", "def channel_backup(self, ctx): messages = await ctx.channel.history(limit = None, oldest_first", "existent user.\") if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT", "e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog should be unloaded just", "self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM sus_users WHERE user_id = ?\", (user.id,))", "def cog_command_error(self, ctx, error): if ctx.command or not ctx.command.has_error_handler(): await", "the rfm value {name}.\") @commands.command(brief = \"a command to save", "None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await", "aliases=[\"add_rtfm\"]) async def addrtfm(self, ctx, name = None, *, url", "guilds a person is in.\") async def mutualguilds(self, ctx, *,", "ctx.guild if guild is None: return await ctx.send(\"Guild is None", "user is None: await ctx.send(\"You can't have a none user.\")", "{ctx.author}\",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text = f\"{ctx.author.id}\") embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is", "\"Test\" result=await self.bot.loop.run_in_executor(None, input, (f\"{args}:\")) await ctx.send(f\"Result of the input", "user = user or ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted:", "if args is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is", "have a cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset cooldown", "menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx, error): await ctx.send(error) class", "name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)} users\")) @stats_status.error async def stats_status_error(self, ctx,", "f\"**Reason :** {item[1]}\", inline = False) return embed 
@commands.command(brief=\"a command", "a list of servers(owner only)\",help=\"Gives a list of guilds(Bot Owners", "= None): if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e:", "a reason why:\") reason = await self.bot.wait_for(\"message\",check= utils.check(ctx)) cur =", "this guils so, I kept it here.\") webhook = discord.Webhook.from_url(os.environ[\"webhook1\"],", "ctx.send(\"you can't send nothing to twitter.\") try: tweet_time = functools.partial(self.tweepy_post,", "user.mutual_guilds: pag.add_line(f\"{g}\") pages = [page.strip(\"`\") for page in pag.pages] pages", "to send mail\") async def mail(self, ctx, *, user: utils.BetterUserconverter", "await cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f\"added", "DM.\") if ctx.guild.chunked: return await ctx.send(\"No need to chunk this", "await ctx.send(\"Bot leaving guild :(\") try: await guild.leave() except Exception", "not command_wanted: return await ctx.send(\"please specify a command\") if not", "if it gets added. user = user or ctx.author if", "def cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx,", "one command usage!\") self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await ctx.send(\"Please", "@commands.command(brief=\"a powerful owner tool to reload local files that aren't", "def channel_backup_error(self, ctx, error): etype = type(error) trace = error.__traceback__", "send mail\") async def mail(self, ctx, *, user: utils.BetterUserconverter =", "None: await ctx.send(\"can't have a user be none.\") if user:", "for what you upload here :eyes: don't upload anything bad", ":(\") @commands.command(brief = \"resets cooldown for you.\",aliases = [\"reset_cooldown\"]) async", "def sus_users_error(self, ctx, error): await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def", "try: await ctx.message.delete() except: await ctx.send(\"It couldn't delete the message", "post = await self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc()", "any errors)\") @commands.command() async def unload(self, ctx, *, cog =", "sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag = commands.Paginator() for g in", "color=random.randint(0, 16777215)) embed_message.set_author(name=f\"Mail from: {ctx.author}\",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text = f\"{ctx.author.id}\") embed_message.set_thumbnail(url =", "of servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only) but", "as e: traceback.print_exc() return await ctx.send(f\"Exception occured at {e}\") await", "embed @commands.command(brief=\"a command to give a list of servers(owner only)\",help=\"Gives", "commands.Paginator() for g in user.mutual_guilds: pag.add_line(f\"{g}\") pages = [page.strip(\"`\") for", "await ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A command to remove testers\") async def remove_tester(self,", "now....\") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)} users\")) @stats_status.error", "reload_basic(self, ctx, *, args = None): if args is None:await", "only)\") async def chunk_guild(self, ctx): if ctx.guild is None: return", "def stats_status(self, ctx): await ctx.send(\"changing status, check now....\") await 
self.bot.change_presence(status=discord.Status.online,", "only)\") async def servers(self, ctx): if await self.bot.is_owner(ctx.author): pag =", "ctx): return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if", "cog_command_error(self, ctx, error): if ctx.command or not ctx.command.has_error_handler(): await ctx.send(error)", "local files that aren't reloadable.\") async def reload_basic(self, ctx, *,", "doesn't take any responbility for what you upload here :eyes:", "Updates\") await webhook.send(embed=embed) if args is None: await ctx.send(\"You sadly", "ctx.send(f\"Traceback: {paste.url}\") @commands.command(brief = \"adds packages and urls to rtfm", "send nothing to twitter.\") try: tweet_time = functools.partial(self.tweepy_post, args) post", "ctx, *, cog = None): cog = cog or \"all\"", "else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}\" if msg.content else", "await cur.execute(\"INSERT INTO testers_list VALUES (?)\", (user.id,)) await self.bot.sus_users.commit() await", "sending\",embed=embed_message) embed_message.add_field(name=\"Sent To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx,", "= f\"{ctx.author.id}\") embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is None): await", "guild: guild.me.joined_at) pag = commands.Paginator() for g in sorted_guilds: pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at,", "\\n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\")", "to provide some attachments.\") await ctx.send(\"JDJG doesn't take any responbility", "a file or mystbin\") async def channel_backup(self, ctx): messages =", "ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed =", "\"leaves a guild only use when needed or really wanted.", "user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM testers_list WHERE", "or ctx.author pag = commands.Paginator() for g in user.mutual_guilds: pag.add_line(f\"{g}\")", "async def shutdown(self, ctx): await ctx.send(\"shutdown/logout time happening.\") await self.bot.close()", "ctx.send(\"You can't have a none user.\") if user: cur =", "self.bot.is_owner(ctx.author) is False: await ctx.send(\"That's an owner only command\") @commands.command(brief=\"Only", "await self.bot.sus_users.commit() await cur.close() await ctx.send(\"added sus users, succesfully\") @commands.command(brief=\"a", "= False) return embed @commands.command(brief = \"a command listed all", "self.bot.loop.run_in_executor(None, input, (f\"{args}:\")) await ctx.send(f\"Result of the input was {result}\")", "def remove_tester(self, ctx, *, user: utils.BetterUserconverter = None): if user", "await ctx.send(\"You sadly can't use it like that.\") if await", "or g.text_channels[0]).mention}\") pages = [page.strip(\"`\") for page in pag.pages] menu", "reload(self, ctx, *, cog = None): cog = cog or", "functools, tweepy import traceback, textwrap from discord.ext.menus.views import ViewMenuPages class", "name or not url or not name and not url:", "no cogs.\") @commands.command() async def reload(self, ctx, *, cog =", "pages = pages or [\"No shared servers\"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages,", "(?, ?)\", (user.id, reason.content)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"added", 
"user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM sus_users WHERE", "{msg.attachments}\" if msg.content else f\"{msg.author} ({('Bot' if msg.author.bot else 'User')})", "session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await", "cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f\"added tester", "import random , discord, os, importlib, mystbin, typing, aioimgur, functools,", "= None): user = user or ctx.author pag = commands.Paginator()", "known as {user}\") else: return await ctx.send(f\"{user} is in the", "guild :(\") try: await guild.leave() except Exception as e: await", "= \"a command listed all the commands\") async def testers(self,", "like that.\") if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't", "await ctx.reply(\"Please give me a message to use.\") message =", "Package: {value.__package__}\") @commands.command(brief=\"backs up a channel and then sends it", "removertfm(self, ctx, *, name = None): if name is None:", "chunk a guild that doesn't exist or a channel that", "ctx.send(\"No need to chunk this guild, it appears to be", "guild = guild or ctx.guild if guild is None: return", "none.\") if user: await ctx.reply(\"Please give me a reason why:\")", "await self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc() return await", "commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog should be", "message in this guils so, I kept it here.\") webhook", "self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"That's an owner", "await ctx.send(\"shutdown/logout time happening.\") await self.bot.close() async def cog_check(self, ctx):", "for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel", "can't use that command\") class ServersEmbed(menus.ListPageSource): async def format_page(self, menu,", "listed all the commands\") async def testers(self, ctx): menu =", "user: await ctx.reply(\"Please give me a reason why:\") reason =", "if cog is None: await ctx.send(\"you can't ask to load", "a list of guilds(Bot Owners only)\") async def servers(self, ctx):", "my webhook\") async def webhook_update(self, ctx, *, args = None):", "= ctx.author if user: await ctx.reply(\"Please give me a message", "= \"leaves a guild only use when needed or really", "traceback.print_exc() #I need to fix all cog_command_error @commands.command(brief=\"Changes Bot Status(Owner", "await ctx.send(\"It couldn't delete the message in this guils so,", "owner only\") @commands.command(brief=\"only works with JDJG, but this command is", "removesus(self, ctx, *, user: utils.BetterUserconverter = None): if user is", "input was {result}\") @commands.command(brief=\"a powerful owner tool to reload local", "1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await", "is None: return await ctx.send(\"You can't chunk a guild that", "else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments :", "to fix all cog_command_error @commands.command(brief=\"Changes Bot Status(Owner Only)\") async 
def", "imgur_client.upload(await x.read()) await ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A command to remove testers\") async", "@sus_users.error async def sus_users_error(self, ctx, error): await ctx.send(error) class TestersEmbed(menus.ListPageSource):", "ctx, error): etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error,", "= tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token,", "a command\") command_wanted = self.bot.get_command(command) if not command_wanted: return await", "await cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx)", "activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if args is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await", "await cur.execute(\"INSERT INTO RTFM_DICTIONARY VALUES (?, ?)\", (name, url)) await", "textwrap from discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog): def __init__(self, bot):", "Owners only)\") async def servers(self, ctx): if await self.bot.is_owner(ctx.author): pag", "= post_text) @commands.command(brief = \"sends tweet to JDBot Twitter\") async", "from the rtfm DB\", aliases = [\"remove_rtfm\"]) async def removertfm(self,", "except: await ctx.send(\"It couldn't delete the message in this guils", "self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)} users\")) @stats_status.error async def", "into a file or mystbin\") async def channel_backup(self, ctx): messages", "menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await", "args is None: await self.bot.change_presence(status=discord.Status.do_not_disturb) if await self.bot.is_owner(ctx.author) is False:", "= pages or [\"No shared servers\"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after", "ctx.send(e) traceback.print_exc() await ctx.send(\"Cog reloaded :D (check for any errors)\")", "specificy a command\") command_wanted = self.bot.get_command(command) if not command_wanted: return", "see what guilds a person is in.\") async def mutualguilds(self,", "{len(self.bot.users)} users\")) @stats_status.error async def stats_status_error(self, ctx, error): await ctx.send(error)", "is False: await ctx.send(\"That's an owner only command\") @commands.command(brief=\"Only owner", "await ctx.send(\"You need a name and also url.\") cur =", "is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client", "**{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\") for", "sus_users VALUES (?, ?)\", (user.id, reason.content)) await self.bot.sus_users.commit() await cur.close()", "await ctx.send(error) traceback.print_exc() @commands.command(brief = \"displays the guild status and", "text file to mystbin: \\n{paste.url}\") @channel_backup.error async def channel_backup_error(self, ctx,", "def aioinput_test(self, ctx, *, args = None): args = args", "return await ctx.send(f\"{user} isn't in the testers list.\") else: self.bot.testers.remove(user.id)", "shared servers\"] menu = 
ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel", "embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ[\"webhook99\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at))", "for g in self.bot.guilds: pag.add_line(f\"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or", "consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api", "await ctx.guild.chunk(cache = True) await ctx.send(\"Finished chunking..\") @chunk_guild.error async def", "def command_bypass(self, ctx ,user: utils.BetterUserconverter = None, *, command =", "'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}\" if msg.content else f\"{msg.author}", "valid command.\") if command is None: await ctx.send(\"select a command", "None: await ctx.send(\"select a command :(\") @commands.command(brief = \"resets cooldown", "@commands.command(brief = \"adds packages and urls to rtfm DB\", aliases=[\"add_rtfm\"])", "traceback.print_exc() return await ctx.send(e) await ctx.send(f\"Sucessfully reloaded {value.__name__} \\nMain Package:", "await ctx.send(\"please specificy a command\") command_wanted = self.bot.get_command(command) if not", "that doesn't exist or a channel that is a DM.\")", "self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error async def", "await ctx.send(\"Cog should be unloaded just fine :D.(check any errors)\")", "channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values)", "if ctx.guild.chunked: return await ctx.send(\"No need to chunk this guild,", "only to be used in testing guilds only)\") async def", "Updates\") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ[\"webhook99\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args)", "import ViewMenuPages class Owner(commands.Cog): def __init__(self, bot): self.bot = bot", "= None): args = args or \"Test\" result=await self.bot.loop.run_in_executor(None, input,", "await ctx.send(\"You can't use that in Dms.\") if await self.bot.is_owner(ctx.author)", "cog = cog or \"all\" if cog == \"all\": for", "delete_message_after = True) await menu.start(ctx) @commands.command() async def update_sus(self, ctx):", "ctx.send(\"shutdown/logout time happening.\") await self.bot.close() async def cog_check(self, ctx): return", "args: return await ctx.send(\"you can't send nothing to twitter.\") try:", "specify a valid command.\") if command is None: await ctx.send(\"select", "discord.Embed(title = \"Testing Users:\", color = random.randint(0, 16777215)) embed.add_field(name =", "send updates to my webhook\") async def webhook_update(self, ctx, *,", "await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief=\"A command to", "INTO RTFM_DICTIONARY VALUES (?, ?)\", (name, url)) await self.bot.sus_users.commit() await", "random , discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy", "random.randint(0, 
16777215)) embed.add_field(name = f\"User ID : {item[0]}\", value =", "def send_tweet(self, ctx, *, args = None): if not args:", "= ?\", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id", "list of guilds(Bot Owners only)\") async def servers(self, ctx): if", "embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) webhook=discord.Webhook.from_url(os.environ[\"webhook99\"], session =", ": {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}\" for", "= [page.strip(\"`\") for page in pag.pages] pages = pages or", "sends it into a file or mystbin\") async def channel_backup(self,", "testers_list VALUES (?)\", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not", "nothing to twitter.\") try: tweet_time = functools.partial(self.tweepy_post, args) post =", "command to change bot's nickname\") async def change_nick(self, ctx ,*,", "Exception as e: traceback.print_exc() return await ctx.send(e) await ctx.send(f\"Sucessfully reloaded", "ctx, *, cog = None): if cog: try: self.bot.unload_extension(cog) except", "grab all in the sus_users list\") async def sus_users(self, ctx):", "@commands.command(brief=\"A command to add testers\") async def add_tester(self, ctx, *,", "try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await", "@commands.command(brief = \"leaves a guild only use when needed or", ":** {item[1]}\", inline = False) return embed @commands.command(brief=\"a command to", "RTFM_DICTIONARY VALUES (?, ?)\", (name, url)) await self.bot.sus_users.commit() await cur.close()", "url = None): if not name or not url or", "of guilds(Bot Owners only)\") async def servers(self, ctx): if await", "to change bot's nickname\") async def change_nick(self, ctx ,*, name=None):", "await ctx.send(\"You can't have a none user.\") if user: cur", "really wanted. 
Otherwise no thanks.\") async def leave_guild(self, ctx, *,", "await guild.leave() except Exception as e: await ctx.send(f\"Somehow an error", "usage!\") self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await ctx.send(\"Please specify a", "is None): await user.create_dm() try: await user.send(embed=embed_message) except: user =", "guild that doesn't exist or a channel that is a", "1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if", "return await ctx.send(\"No need to chunk this guild, it appears", "discord.Forbidden: await ctx.send(\"Appears not to have valid perms\") if isinstance(ctx.channel,discord.DMChannel):", "packages and urls to rtfm DB\", aliases=[\"add_rtfm\"]) async def addrtfm(self,", "I kept it here.\") webhook = discord.Webhook.from_url(os.environ[\"webhook1\"], session = self.bot.session)", "can't remove None\") cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM", "discord.ext import commands, menus import utils import random , discord,", "None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client =", "@commands.command(brief=\"Commands to see what guilds a person is in.\") async", ",user: utils.BetterUserconverter = None, *, command = None): #make sure", "color = random.randint(0, 16777215)) embed.add_field(name = f\"User ID : {item[0]}\",", "to save images to imgur(for owner only lol)\") async def", "a guild for the purpose of testing purpose(it's owner only", "async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None): guild", "cog = None): cog = cog or \"all\" if cog", "updated.\") async def servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds =", "'T')} \\n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\\n\") pages =", "async def channel_backup_error(self, ctx, error): etype = type(error) trace =", "tweepy import traceback, textwrap from discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog):", "not name or not url or not name and not", "if user is None: await ctx.send(\"You can't have a non", "else: return await ctx.send(f\"{user} is in the testers list already!\")", "typing, aioimgur, functools, tweepy import traceback, textwrap from discord.ext.menus.views import", "pag = commands.Paginator() for g in sorted_guilds: pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at, style =", "a DM.\") if ctx.guild.chunked: return await ctx.send(\"No need to chunk", "pages = [page.strip(\"`\") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages,", "(name, url)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f\"added {name} and", "*, guild: typing.Optional[discord.Guild] = None): guild = guild or ctx.guild", "= ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx,", "Owner(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(brief=\"a command to", "testing purpose(it's owner only to be used in testing guilds", "await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: return", "{msg.attachments}\" for msg in messages) mystbin_client = mystbin.Client(session = self.bot.session)", "ctx, name = None, *, url = None): if not", "ctx, *, name = None): if name is None: return", "up a channel and then sends it into a file", "None: 
await ctx.send(\"You sadly can't use it like that.\") if", "await ctx.send(\"Changing Nickname\") try: await ctx.guild.me.edit(nick=name) except discord.Forbidden: await ctx.send(\"Appears", "async def channel_backup(self, ctx): messages = await ctx.channel.history(limit = None,", "utils.check(ctx)) embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215)) embed_message.set_author(name=f\"Mail from: {ctx.author}\",icon_url=(ctx.author.display_avatar.url))", "module named None\") if args: try: module = importlib.import_module(name=args) except", "bot): self.bot = bot @commands.command(brief=\"a command to send mail\") async", "only lol)\") async def save_image(self, ctx): if not ctx.message.attachments: return", "= None): if not args: return await ctx.send(\"you can't send", "await ctx.reply(\"User not found, returning Letter\") user = ctx.author if", "isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't use that in Dms.\") if await", "in the sus_users list\") async def sus_users(self, ctx): cur =", "'User')}) : {new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}\"", "= bot @commands.command(brief=\"a command to send mail\") async def mail(self,", "ctx.send(\"You can't chunk a guild that doesn't exist or a", "that in Dms.\") if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You", "in the testers list already!\") def tweepy_post(self, post_text = None):", "isinstance(ctx.channel, discord.TextChannel): await ctx.send(\"Changing Nickname\") try: await ctx.guild.me.edit(nick=name) except discord.Forbidden:", "args = None): args = args or \"Test\" result=await self.bot.loop.run_in_executor(None,", "list\") async def sus_users(self, ctx): cur = await self.bot.sus_users.cursor() cursor", "it's owner only\") @commands.command(brief=\"only works with JDJG, but this command", "twitter_api.update_status(status = post_text) @commands.command(brief = \"sends tweet to JDBot Twitter\")", "if (user.dm_channel is None): await user.create_dm() try: await user.send(embed=embed_message) except:", "if args is None: await ctx.send(\"You sadly can't use it", "await webhook.send(embed=embed) if args is None: await ctx.send(\"You sadly can't", "to autoconverter if it gets added. user = user or", "guilds(Bot Owners only)\") async def servers(self, ctx): if await self.bot.is_owner(ctx.author):", "is False: await ctx.send(\"You can't use that\") @commands.command(brief=\"Commands to see", "for page in pag.pages] pages = pages or [\"No shared", "to have valid perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't use", "= False) return embed @commands.command(brief=\"a command to grab all in", "{e}\") await ctx.send(f\"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief =", "in messages) mystbin_client = mystbin.Client(session = self.bot.session) paste = await", "a guild only use when needed or really wanted. 
Otherwise", "await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session)", "await ctx.send(\"you can't send nothing to twitter.\") try: tweet_time =", "embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if args is None: await", "menu, item): embed = discord.Embed(title = \"Testing Users:\", color =", "= await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE name =", "this guild, it appears to be chunked\") await ctx.guild.chunk(cache =", "not name and not url: return await ctx.send(\"You need a", "args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await ctx.send(\"It", "= os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token", "await ctx.send(\"That doesn't have a cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx)", "args = None): if not args: return await ctx.send(\"you can't", "chunking..\") @chunk_guild.error async def chunk_guild_error(self, ctx, error): await ctx.send(error) traceback.print_exc()", "meant to send updates to my webhook\") async def webhook_update(self,", "def stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief=\"a command to give", "ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f\"{command_wanted.name} now accessible", "= self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed)", "user.\") if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM", "async def unload(self, ctx, *, cog = None): if cog:", "use when needed or really wanted. 
    @commands.command()
    async def load(self, ctx, *, cog=None):
        if cog:
            try:
                self.bot.load_extension(cog)
            except Exception as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Loaded cog (see if there's any errors)")
        if cog is None:
            await ctx.send("you can't ask to load no cogs.")

    @commands.command()
    async def reload(self, ctx, *, cog=None):
        cog = cog or "all"
        if cog == "all":
            for x in list(self.bot.extensions):
                try:
                    self.bot.reload_extension(x)
                except commands.errors.ExtensionError as e:
                    await ctx.send(e)
                    traceback.print_exc()
            await ctx.send("done reloading all cogs (check for any errors)")
        else:
            try:
                self.bot.reload_extension(cog)
            except commands.errors.ExtensionError as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Cog reloaded :D (check for any errors)")

    @commands.command()
    async def unload(self, ctx, *, cog=None):
        if cog:
            try:
                self.bot.unload_extension(cog)
            except commands.errors.ExtensionError as e:
                await ctx.send(e)
                traceback.print_exc()
            await ctx.send("Cog should be unloaded just fine :D (check any errors)")
        if cog is None:
            await ctx.send("you can't ask to unload no cogs")
f\"**Reason :** {item[1]}\", inline", "16777215)) return embed @commands.command(brief=\"a command to give a list of", "None): consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key,", "remove testers\") async def remove_tester(self, ctx, *, user: utils.BetterUserconverter =", "isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await ctx.send(\"It couldn't delete", "a name and also url.\") cur = await self.bot.sus_users.cursor() await", "list.\") else: self.bot.testers.remove(user.id) await ctx.send(f\"Removed tester known as {user}\") @commands.command(brief=\"A", "users, succesfully\") @commands.command(brief=\"a command to remove sus users.\") async def", "user.send(content=\"Message failed. sending\",embed=embed_message) embed_message.add_field(name=\"Sent To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def", "the {user} for one command usage!\") self.bot.special_access[user.id]=command_wanted.name if command_wanted is", "channel and then sends it into a file or mystbin\")", "to add testers\") async def add_tester(self, ctx, *, user: utils.BetterUserconverter", "command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f\"{command_wanted.name} now accessible for the {user}", "@commands.command() async def reload(self, ctx, *, cog = None): cog", "def change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel,", "command to send mail\") async def mail(self, ctx, *, user:", "guils so, I kept it here.\") webhook = discord.Webhook.from_url(os.environ[\"webhook1\"], session", "ctx.send(\"done reloading all cogs(check for any errors)\") else: try: self.bot.reload_extension(cog)", "try: await user.send(embed=embed_message) except: user = ctx.author await user.send(content=\"Message failed.", "= \"adds packages and urls to rtfm DB\", aliases=[\"add_rtfm\"]) async", "embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if args", "None, *, command = None): #make sure to swap to", "command to add sus_users with a reason\") async def addsus(self,", "load no cogs.\") @commands.command() async def reload(self, ctx, *, cog", "ctx.send(\"select a command :(\") @commands.command(brief = \"resets cooldown for you.\",aliases", "ctx, *, command = None): if not command: return await", "post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth", "{name} and {url} to the rtfm DB\") @commands.command(brief = \"removes", "ctx , * , args=None): if await self.bot.is_owner(ctx.author): if args:", "ctx.reply(\"Please give me a message to use.\") message = await", "the commands\") async def testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page", "nickname\") async def change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author):", "that is a DM.\") if ctx.guild.chunked: return await ctx.send(\"No need", "module = importlib.import_module(name=args) except Exception as e: traceback.print_exc() return await", "= True).flatten() new_line = \"\\n\" page = \"\\n\".join(f\"{msg.author} ({('Bot' if", "= utils.check(ctx)) embed_message = 
discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215)) embed_message.set_author(name=f\"Mail from:", "user: utils.BetterUserconverter = None): if user is None: await ctx.send(\"You", "{new_line.join(f'{e.to_dict()}' for e in msg.embeds)} {new_line}Attachments : {msg.attachments}\" for msg", "updates to my webhook\") async def webhook_update(self, ctx, *, args", "pag.pages] pages = pages or [\"No shared servers\"] menu =", "was {result}\") @commands.command(brief=\"a powerful owner tool to reload local files", "self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that command\") class", "None): if not args: return await ctx.send(\"you can't send nothing", "async def reload(self, ctx, *, cog = None): cog =", "the guild status and user status immediately\") async def stats_status(self,", "not found, returning Letter\") user = ctx.author if user: await", "def reload_basic(self, ctx, *, args = None): if args is", "tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret')", "args is None: await ctx.send(\"You sadly can't use it like", "await ctx.send(\"select a command :(\") @commands.command(brief = \"resets cooldown for", "discord.Webhook.from_url(os.environ[\"webhook1\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\")", "for any errors)\") else: try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e:", "await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO sus_users VALUES (?, ?)\", (user.id,", "= 'd')} {discord.utils.format_dt(g.me.joined_at, style = 'T')} \\n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) |", "embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if args is None: await ctx.send(\"You", "Exception as e: await ctx.send(f\"Somehow an error occured: {e}\") traceback.print_exc()", "ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste", "give me a reason why:\") reason = await self.bot.wait_for(\"message\",check= utils.check(ctx))", "sent tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks a guild for", "{(g.system_channel or g.text_channels[0]).mention}\") pages = [page.strip(\"`\") for page in pag.pages]", "owner command to change bot's nickname\") async def change_nick(self, ctx", "def format_page(self, menu, item): embed=discord.Embed(title = \"Users Deemed Suspicious by", "= sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag = commands.Paginator() for g", "key=lambda guild: guild.me.joined_at) pag = commands.Paginator() for g in sorted_guilds:", "ctx.send(\"Loaded cog(see if there's any errors)\") if cog is None:", "\"displays the guild status and user status immediately\") async def", "channel that is a DM.\") if ctx.guild.chunked: return await ctx.send(\"No", "cur.close() await ctx.send(\"added sus users, succesfully\") @commands.command(brief=\"a command to remove", "when needed or really wanted. 
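    # cog_check above already gates every command in this cog behind
    # bot.is_owner, so the explicit is_owner branches in status/change_nick
    # (and in several commands below) are a second, redundant layer of the
    # same check rather than extra security.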
    class ServersEmbed(menus.ListPageSource):
        async def format_page(self, menu, item):
            embed = discord.Embed(title="Servers:", description=item, color=random.randint(0, 16777215))
            return embed

    @commands.command(brief="a command to give a list of servers(owner only)", help="Gives a list of guilds(Bot Owners only)")
    async def servers(self, ctx):
        if await self.bot.is_owner(ctx.author):
            pag = commands.Paginator()
            for g in self.bot.guilds:
                pag.add_line(f"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}")
            pages = [page.strip("`") for page in pag.pages]
            menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1), delete_message_after=True)
            if ctx.author.dm_channel is None:
                await ctx.author.create_dm()
            await menu.start(ctx, channel=ctx.author.dm_channel)
        if await self.bot.is_owner(ctx.author) is False:
            await ctx.send("You can't use that it's owner only")

    @commands.command(brief="only works with JDJG, but this command is meant to send updates to my webhook")
    async def webhook_update(self, ctx, *, args=None):
        if await self.bot.is_owner(ctx.author):
            if args:
                if isinstance(ctx.channel, discord.TextChannel):
                    try:
                        await ctx.message.delete()
                    except discord.HTTPException:
                        await ctx.send("It couldn't delete the message in this guild, so I kept it here.")
                # the same update embed goes to both webhooks
                for env_name in ("webhook1", "webhook99"):
                    webhook = discord.Webhook.from_url(os.environ[env_name], session=self.bot.session)
                    embed = discord.Embed(title="Update", color=(35056), timestamp=(ctx.message.created_at))
                    embed.add_field(name="Update Info:", value=args)
                    embed.set_author(name="<NAME>", icon_url='https://i.imgur.com/pdQkCBv.png')
                    embed.set_footer(text="JDJG's Updates")
                    await webhook.send(embed=embed)
            if args is None:
                await ctx.send("You sadly can't use it like that.")
        if await self.bot.is_owner(ctx.author) is False:
            await ctx.send("You can't use that")
    @commands.command(brief="Commands to see what guilds a person is in.")
    async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None):
        user = user or ctx.author
        pag = commands.Paginator()
        for g in user.mutual_guilds:
            pag.add_line(f"{g}")
        pages = [page.strip("`") for page in pag.pages]
        pages = pages or ["No shared servers"]
        menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1), delete_message_after=True)
        if ctx.author.dm_channel is None:
            await ctx.author.create_dm()
        await menu.start(ctx, channel=ctx.author.dm_channel)

    @commands.command(brief="A command to add sus_users with a reason")
    async def addsus(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("You can't have a none user.")
        if user:
            await ctx.reply("Please give me a reason why:")
            reason = await self.bot.wait_for("message", check=utils.check(ctx))
            cur = await self.bot.sus_users.cursor()
            await cur.execute("INSERT INTO sus_users VALUES (?, ?)", (user.id, reason.content))
            await self.bot.sus_users.commit()
            await cur.close()
            await ctx.send("added sus user successfully")

    @commands.command(brief="a command to remove sus users.")
    async def removesus(self, ctx, *, user: utils.BetterUserconverter = None):
        if user is None:
            await ctx.send("You can't have a non-existent user.")
        if user:
            cur = await self.bot.sus_users.cursor()
            await cur.execute("DELETE FROM sus_users WHERE user_id = ?", (user.id,))
            await self.bot.sus_users.commit()
            await cur.close()
            await ctx.send("Removed sus user.")
    class SusUsersEmbed(menus.ListPageSource):
        async def format_page(self, menu, item):
            embed = discord.Embed(title="Users Deemed Suspicious by JDJG Inc. Official", color=random.randint(0, 16777215))
            embed.add_field(name=f"User ID : {item[0]}", value=f"**Reason :** {item[1]}", inline=False)
            return embed

    @commands.command(brief="a command to grab all in the sus_users list")
    async def sus_users(self, ctx):
        cur = await self.bot.sus_users.cursor()
        cursor = await cur.execute("SELECT * FROM SUS_USERS;")
        sus_users = tuple(await cursor.fetchall())
        await cur.close()
        await self.bot.sus_users.commit()
        menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1), delete_message_after=True)
        await menu.start(ctx)

    @sus_users.error
    async def sus_users_error(self, ctx, error):
        await ctx.send(error)

    class TestersEmbed(menus.ListPageSource):
        async def format_page(self, menu, item):
            embed = discord.Embed(title="Testing Users:", color=random.randint(0, 16777215))
            embed.add_field(name="User ID:", value=f"{item}", inline=False)
            return embed

    @commands.command(brief="a command that lists all the testers")
    async def testers(self, ctx):
        menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page=1), delete_message_after=True)
        await menu.start(ctx)
sending\",embed=embed_message) embed_message.add_field(name=\"Sent To:\",value=str(user))", "named None\") if args: try: module = importlib.import_module(name=args) except Exception", "await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if args", "await ctx.message.delete() except: await ctx.send(\"It couldn't delete the message in", "def addrtfm(self, ctx, name = None, *, url = None):", "ID : {item[0]}\", value = f\"**Reason :** {item[1]}\", inline =", "f\"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e", "page in pag.pages] pages = pages or [\"No shared servers\"]", "cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO sus_users VALUES (?,", "discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc() return await ctx.send(e)", "cur = await self.bot.sus_users.cursor() cursor = await cur.execute(\"SELECT * FROM", "send_tweet(self, ctx, *, args = None): if not args: return", "then sends it into a file or mystbin\") async def", "ctx.send(f\"{command_wanted.name} now accessible for the {user} for one command usage!\")", "users.\") async def removesus(self, ctx, *, user: utils.BetterUserconverter = None):", "existent user.\") if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE", "\"resets cooldown for you.\",aliases = [\"reset_cooldown\"]) async def resetcooldown(self, ctx,", "*, args = None): if args is None:await ctx.send(\"Can't reload", "cog(see if there's any errors)\") if cog is None: await", "INTO sus_users VALUES (?, ?)\", (user.id, reason.content)) await self.bot.sus_users.commit() await", "command to save images to imgur(for owner only lol)\") async", "can't chunk a guild that doesn't exist or a channel", "{result}\") @commands.command(brief=\"a powerful owner tool to reload local files that", "= True) await ctx.send(\"Finished chunking..\") @chunk_guild.error async def chunk_guild_error(self, ctx,", "RTFM_DICTIONARY WHERE name = ?\", (name,)) await self.bot.sus_users.commit() await cur.close()", "non existent user.\") if user: cur = await self.bot.sus_users.cursor() await", "None: await ctx.reply(\"User not found, returning Letter\") user = ctx.author", "os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api = tweepy.API(auth) return", "cur.execute(\"INSERT INTO RTFM_DICTIONARY VALUES (?, ?)\", (name, url)) await self.bot.sus_users.commit()", "None): #make sure to swap to autoconverter if it gets", "unloaded just fine :D.(check any errors)\") if cog is None:", "= mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await ctx.send(f\"Traceback: {paste.url}\") @commands.command(brief", "ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url = await imgur_client.upload(await x.read()) await", "except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog reloaded", "None): user = user or ctx.author pag = commands.Paginator() for", "embed.add_field(name = f\"User ID : {item[0]}\", value = f\"**Reason :**", "= 1), delete_message_after = True) await menu.start(ctx) @commands.command() async def", "aren't reloadable.\") async def reload_basic(self, ctx, *, args = None):", "if user is None: await ctx.reply(\"User not found, 
returning Letter\")", "@commands.command(brief = \"a command listed all the commands\") async def", "command_wanted = self.bot.get_command(command) if not command_wanted: return await ctx.send(\"please specify", "ctx.send(\"please specificy a command\") command_wanted = self.bot.get_command(command) if not command_wanted:", "await ctx.send(e) traceback.print_exc() await ctx.send(\"done reloading all cogs(check for any", "None): await user.create_dm() try: await user.send(embed=embed_message) except: user = ctx.author", "of servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only)\") async", "a message to use.\") message = await self.bot.wait_for(\"message\",check = utils.check(ctx))", "self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in self.bot.guilds: pag.add_line(f\"[{len(g.members)}/{g.member_count}] **{g.name}**", "a guild that doesn't exist or a channel that is", "bot @commands.command(brief=\"a command to send mail\") async def mail(self, ctx,", "async def cog_command_error(self, ctx, error): if ctx.command or not ctx.command.has_error_handler():", "def servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda", "await ctx.send(f\"added {name} and {url} to the rtfm DB\") @commands.command(brief", "JDBot Twitter\") async def send_tweet(self, ctx, *, args = None):", "= tweepy.API(auth) return twitter_api.update_status(status = post_text) @commands.command(brief = \"sends tweet", "INTO testers_list VALUES (?)\", (user.id,)) await self.bot.sus_users.commit() await cur.close() if", "{e}\") traceback.print_exc() @commands.command() async def aioinput_test(self, ctx, *, args =", "ctx.send(\"Changing Nickname\") try: await ctx.guild.me.edit(nick=name) except discord.Forbidden: await ctx.send(\"Appears not", "await self.bot.sus_users.commit() await ctx.send(\"Updated SQL boss.\") @update_sus.error async def update_sus_error(self,", "if not user.id in self.bot.testers: return await ctx.send(f\"{user} isn't in", "if ctx.guild is None: return await ctx.send(\"You can't chunk a", "if not ctx.message.attachments: return await ctx.send(\"You need to provide some", "use that\") @commands.command(brief=\"Commands to see what guilds a person is", "= self.bot.get_command(command) if not command_wanted: return await ctx.send(\"please specify a", "@commands.command() async def aioinput_test(self, ctx, *, args = None): args", "<reponame>Obsidian-Development/JDBot<filename>cogs/owner.py from discord.ext import commands, menus import utils import random", "= None): #make sure to swap to autoconverter if it", "pages = textwrap.wrap(values, width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page", "tweet_time = functools.partial(self.tweepy_post, args) post = await self.bot.loop.run_in_executor(None, tweet_time) except", "*, name = None): if name is None: return await", "= ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await", "command is None: await ctx.send(\"select a command :(\") @commands.command(brief =", "need to chunk this guild, it appears to be chunked\")", "and not url: return await ctx.send(\"You need a name and", "testers list already!\") def tweepy_post(self, post_text = None): consumer_key =", "traceback.print_exc() return await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url = await", "ctx): menu = 
    @commands.command(brief="leaves a guild, only use when needed or really wanted. Otherwise no thanks.")
    async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] = None):
        guild = guild or ctx.guild
        if guild is None:
            return await ctx.send("Guild is None, can't do anything.")
        await ctx.send("Bot leaving guild :(")
        try:
            await guild.leave()
        except Exception as e:
            await ctx.send(f"Somehow an error occurred: {e}")
            traceback.print_exc()

    @commands.command()
    async def aioinput_test(self, ctx, *, args=None):
        args = args or "Test"
        result = await self.bot.loop.run_in_executor(None, input, f"{args}:")
        await ctx.send(f"Result of the input was {result}")

    @commands.command(brief="a powerful owner tool to reload local files that aren't reloadable.")
    async def reload_basic(self, ctx, *, args=None):
        if args is None:
            await ctx.send("Can't reload module named None")
        if args:
            try:
                module = importlib.import_module(name=args)
            except Exception as e:
                traceback.print_exc()
                return await ctx.send(e)
            try:
                value = importlib.reload(module)
            except Exception as e:
                traceback.print_exc()
                return await ctx.send(e)
            await ctx.send(f"Successfully reloaded {value.__name__} \nMain Package: {value.__package__}")
"async def stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief=\"a command to", "access_secret) twitter_api = tweepy.API(auth) return twitter_api.update_status(status = post_text) @commands.command(brief =", "or mystbin\") async def channel_backup(self, ctx): messages = await ctx.channel.history(limit", "tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret)", "but with join dates updated.\") async def servers2(self, ctx): if", "await ctx.send(\"Cog reloaded :D (check for any errors)\") @commands.command() async", "random.randint(0, 16777215)) embed.add_field(name = \"User ID:\", value = f\"{item}\", inline", "swap to autoconverter if it gets added. user = user", "as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog reloaded :D (check", "args=None): if await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args))", "traceback.print_exc() await ctx.send(\"Cog should be unloaded just fine :D.(check any", "= ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is None): await", ",*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send(\"Changing", "ctx.send(\"Finished chunking..\") @chunk_guild.error async def chunk_guild_error(self, ctx, error): await ctx.send(error)", "command_wanted: return await ctx.send(\"please specify a command\") if not command_wanted.is_on_cooldown(ctx):", "*, args = None): if not args: return await ctx.send(\"you", "with a reason\") async def addsus(self, ctx, *, user: utils.BetterUserconverter", "?\", (user.id,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"Removed sus users.\")", "stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief=\"a command to give a", "if not command: return await ctx.send(\"please specificy a command\") command_wanted", "= True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx,", "per_page = 1),delete_message_after = True) if (ctx.author.dm_channel is None): await", "self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send(\"Changing Nickname\") try: await ctx.guild.me.edit(nick=name)", "or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need to fix", "Otherwise no thanks.\") async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild]", "Bot Status(Owner Only)\") async def status(self , ctx , *", "of guilds(Bot Owners only) but with join dates updated.\") async", "self.bot.close() async def cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async def", "to be used in testing guilds only)\") async def chunk_guild(self,", "except Exception as e: await ctx.send(f\"Somehow an error occured: {e}\")", "@chunk_guild.error async def chunk_guild_error(self, ctx, error): await ctx.send(error) traceback.print_exc() @commands.command(brief", "await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)} users\")) @stats_status.error async", "await ctx.send(f\"reset cooldown of {command_wanted}\") @commands.command(brief = 
\"leaves a guild", "@commands.command(brief = \"resets cooldown for you.\",aliases = [\"reset_cooldown\"]) async def", "return await ctx.send(\"You need a name and also url.\") cur", "for one command usage!\") self.bot.special_access[user.id]=command_wanted.name if command_wanted is None: await", "= await self.bot.wait_for(\"message\",check= utils.check(ctx)) cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT", "ctx.send(\"Cog reloaded :D (check for any errors)\") @commands.command() async def", "await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete()", "per_page = 1), delete_message_after = True) await menu.start(ctx) @commands.command() async", "{msg.content} {new_line}Attachments : {msg.attachments}\" if msg.content else f\"{msg.author} ({('Bot' if", "guild only use when needed or really wanted. Otherwise no", "command = None): #make sure to swap to autoconverter if", "command to add testers\") async def add_tester(self, ctx, *, user:", "the sus_users list\") async def sus_users(self, ctx): cur = await", "*, args = None): if await self.bot.is_owner(ctx.author): if args: if", "= guild or ctx.guild if guild is None: return await", "importlib.import_module(name=args) except Exception as e: traceback.print_exc() return await ctx.send(e) try:", "traceback, textwrap from discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog): def __init__(self,", "cur.close() if not user.id in self.bot.testers: return await ctx.send(f\"{user} isn't", "async def change_nick(self, ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if", "e: traceback.print_exc() return await ctx.send(f\"Exception occured at {e}\") await ctx.send(f\"Url", "def testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after", "None: await ctx.send(\"you can't ask to load no cogs.\") @commands.command()", "is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if", "reason\") async def addsus(self, ctx, *, user: utils.BetterUserconverter = None):", "os, importlib, mystbin, typing, aioimgur, functools, tweepy import traceback, textwrap", "give me a message to use.\") message = await self.bot.wait_for(\"message\",check", "await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in self.bot.guilds: pag.add_line(f\"[{len(g.members)}/{g.member_count}]", "is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks a guild for the purpose", "(name,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f\"Removed the rfm value", "but this command is meant to send updates to my", "await ctx.send(e) await ctx.send(f\"Sucessfully reloaded {value.__name__} \\nMain Package: {value.__package__}\") @commands.command(brief=\"backs", "of testing purpose(it's owner only to be used in testing", "reloaded {value.__name__} \\nMain Package: {value.__package__}\") @commands.command(brief=\"backs up a channel and", "tweet_time) except Exception as e: traceback.print_exc() return await ctx.send(f\"Exception occured", "ctx, error): await ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def command_bypass(self, ctx ,user:", "discord.ext.menus.views import ViewMenuPages class Owner(commands.Cog): def __init__(self, bot): self.bot =", "format_page(self, menu, item): embed = discord.Embed(title = \"Testing Users:\", color", "command_wanted is None: await ctx.send(\"Please specify a valid 
command.\") if", "command to remove testers\") async def remove_tester(self, ctx, *, user:", "Twitter\") async def send_tweet(self, ctx, *, args = None): if", "reason = await self.bot.wait_for(\"message\",check= utils.check(ctx)) cur = await self.bot.sus_users.cursor() await", "def status(self , ctx , * , args=None): if await", "await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM sus_users WHERE user_id = ?\",", "await ctx.send(error) @commands.command(brief=\"a command to give a list of servers(owner", "await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM testers_list WHERE user_id = ?\",", "None, *, url = None): if not name or not", "imgur(for owner only lol)\") async def save_image(self, ctx): if not", "guilds(Bot Owners only) but with join dates updated.\") async def", "exist or a channel that is a DM.\") if ctx.guild.chunked:", "sus_users_error(self, ctx, error): await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self,", "SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title = \"Users Deemed", "def resetcooldown(self, ctx, *, command = None): if not command:", "await self.bot.is_owner(ctx.author) is False: await ctx.send(\"That's an owner only command\")", "if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments : {msg.attachments}\" if", "= None): if await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel):", "mystbin.Client(session = self.bot.session) paste = await mystbin_client.post(page) await ctx.author.send(content=f\"Added text", "used in testing guilds only)\") async def chunk_guild(self, ctx): if", "= await mystbin_client.post(page) await ctx.author.send(content=f\"Added text file to mystbin: \\n{paste.url}\")", "{user}\") else: return await ctx.send(f\"{user} is in the testers list", "embed @commands.command(brief = \"a command listed all the commands\") async", "if name is None: return await ctx.send(\"You can't remove None\")", "user or ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f\"{command_wanted.name}", "from discord.ext import commands, menus import utils import random ,", "def update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def command_bypass(self,", "class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title = \"Users", "embed_message.set_author(name=f\"Mail from: {ctx.author}\",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text = f\"{ctx.author.id}\") embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if", "testers\") async def add_tester(self, ctx, *, user: utils.BetterUserconverter = None):", "msg in messages) mystbin_client = mystbin.Client(session = self.bot.session) paste =", "name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await ctx.send(\"Changing Nickname\")", "command.\") if command is None: await ctx.send(\"select a command :(\")", "FROM testers_list WHERE user_id = ?\", (user.id,)) await self.bot.sus_users.commit() await", "servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only)\") async def", "\"User ID:\", value = f\"{item}\", inline = False) return embed", "\"\\n\".join(f\"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content} {new_line}Attachments :", "the testers list.\") else: 
self.bot.testers.remove(user.id) await ctx.send(f\"Removed tester known as", "the rtfm DB\") @commands.command(brief = \"removes packages from the rtfm", "e in msg.embeds)} {new_line}Attachments : {msg.attachments}\" for msg in messages)", "error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width = 1992)", "= discord.Embed(title = \"Testing Users:\", color = random.randint(0, 16777215)) embed.add_field(name", "if guild is None: return await ctx.send(\"Guild is None can't", "embed_message.set_footer(text = f\"{ctx.author.id}\") embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is None):", "= user or ctx.author if command: command_wanted=self.bot.get_command(command) if command_wanted: await", "await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url = await imgur_client.upload(await x.read())", "access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api =", "None: await ctx.send(\"you can't ask to reload no cogs\") @commands.command()", "if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await ctx.send(\"It couldn't", "Deemed Suspicious by JDJG Inc. Official\", color = random.randint(0, 16777215))", "with join dates updated.\") async def servers2(self, ctx): if await", "command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f\"{command_wanted.name} now accessible for the", "self.bot.wait_for(\"message\",check= utils.check(ctx)) cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO sus_users", "None\") cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE", "menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after = True) await", "sus_users with a reason\") async def addsus(self, ctx, *, user:", "reloading all cogs(check for any errors)\") else: try: self.bot.reload_extension(cog) except", "ctx.send(\"That's an owner only command\") @commands.command(brief=\"Only owner command to change", "ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A command to remove testers\") async def remove_tester(self, ctx,", "error): if ctx.command or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I", "if command is None: await ctx.send(\"select a command :(\") @commands.command(brief", "@commands.command() async def shutdown(self, ctx): await ctx.send(\"shutdown/logout time happening.\") await", "cur.execute(\"SELECT * FROM SUS_USERS;\") sus_users = tuple(await cursor.fetchall()) await cur.close()", "embed_message = discord.Embed(title=message.content, timestamp=(message.created_at), color=random.randint(0, 16777215)) embed_message.set_author(name=f\"Mail from: {ctx.author}\",icon_url=(ctx.author.display_avatar.url)) embed_message.set_footer(text", "await cur.execute(\"DELETE FROM sus_users WHERE user_id = ?\", (user.id,)) await", "= [page.strip(\"`\") for page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True)", "ctx.send(\"You need to provide some attachments.\") await ctx.send(\"JDJG doesn't take", "except Exception as e: traceback.print_exc() return await ctx.send(e) try: value=importlib.reload(module)", "to rtfm DB\", aliases=[\"add_rtfm\"]) async def addrtfm(self, ctx, name =", "async def 
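    # add_tester/remove_tester write to the testers_list table before checking
    # self.bot.testers (the in-memory list, presumably loaded at startup), so
    # the INSERT above can add a duplicate row for someone already in the
    # list even though the in-memory state is left unchanged.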
update_sus(self, ctx): await self.bot.sus_users.commit() await ctx.send(\"Updated SQL boss.\")", "ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is", "args or \"Test\" result=await self.bot.loop.run_in_executor(None, input, (f\"{args}:\")) await ctx.send(f\"Result of", "you.\",aliases = [\"reset_cooldown\"]) async def resetcooldown(self, ctx, *, command =", "FROM SUS_USERS;\") sus_users = tuple(await cursor.fetchall()) await cur.close() await self.bot.sus_users.commit()", "the message in this guils so, I kept it here.\")", "aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url = await imgur_client.upload(await x.read()) await ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A", "None): if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await", "upload here :eyes: don't upload anything bad okay?\") for x", "try: await guild.leave() except Exception as e: await ctx.send(f\"Somehow an", "= ctx.author.dm_channel) @commands.command(brief=\"A command to add sus_users with a reason\")", "\"Users Deemed Suspicious by JDJG Inc. Official\", color = random.randint(0,", "update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def command_bypass(self, ctx", "self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: return await", "ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need to fix all cog_command_error", "ctx.send(e) await ctx.send(f\"Sucessfully reloaded {value.__name__} \\nMain Package: {value.__package__}\") @commands.command(brief=\"backs up", "self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if ctx.command or not", "def format_page(self, menu, item): embed = discord.Embed(title=\"Servers:\",description=item,color=random.randint(0, 16777215)) return embed", "try: module = importlib.import_module(name=args) except Exception as e: traceback.print_exc() return", "ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx,", "auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret = os.getenv('tweet_token')", "self.bot.get_command(command) if not command_wanted: return await ctx.send(\"please specify a command\")", "@commands.command(brief=\"A command to remove testers\") async def remove_tester(self, ctx, *,", "{(g.system_channel or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\") for page in pag.pages]", "None: return await ctx.send(\"Guild is None can't do anything.\") await", "= f\"**Reason :** {item[1]}\", inline = False) return embed @commands.command(brief=\"a", "@commands.command(brief=\"a command to grab all in the sus_users list\") async", "Official\", color = random.randint(0, 16777215)) embed.add_field(name = f\"User ID :", "def tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key') consumer_secret =", "is in.\") async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter =", "16777215)) embed.add_field(name = \"User ID:\", value = f\"{item}\", inline =", "await ctx.send(f\"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks", "self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag = 
commands.Paginator()", "can't ask to reload no cogs\") @commands.command() async def shutdown(self,", "the testers list already!\") def tweepy_post(self, post_text = None): consumer_key", "await ctx.send(\"You can't chunk a guild that doesn't exist or", "ctx.send(f\"Removed the rfm value {name}.\") @commands.command(brief = \"a command to", "None:await ctx.send(\"Can't reload module named None\") if args: try: module", "page in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is", "ViewMenuPages class Owner(commands.Cog): def __init__(self, bot): self.bot = bot @commands.command(brief=\"a", "discord.TextChannel): await ctx.send(\"Changing Nickname\") try: await ctx.guild.me.edit(nick=name) except discord.Forbidden: await", "happening.\") await self.bot.close() async def cog_check(self, ctx): return await self.bot.is_owner(ctx.author)", "tweet to JDBot Twitter\") async def send_tweet(self, ctx, *, args", "?)\", (user.id, reason.content)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"added sus", "= ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await", "None): if user is None: await ctx.send(\"can't have a user", "Exception as e: traceback.print_exc() return await ctx.send(e) try: value=importlib.reload(module) except", "x.read()) except Exception as e: traceback.print_exc() return await ctx.send(e) imgur_client=", "except Exception as e: traceback.print_exc() return await ctx.send(e) await ctx.send(f\"Sucessfully", "per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel", "user_id = ?\", (user.id,)) await self.bot.sus_users.commit() await cur.close() await ctx.send(\"Removed", "an error occured: {e}\") traceback.print_exc() @commands.command() async def aioinput_test(self, ctx,", "status(self , ctx , * , args=None): if await self.bot.is_owner(ctx.author):", "*, url = None): if not name or not url", "= await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO testers_list VALUES (?)\", (user.id,))", "have valid perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't use that", "cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc()", "self.bot.testers.append(user.id) await ctx.send(f\"added tester known as {user}\") else: return await", "error): etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace)))", "ask to load no cogs.\") @commands.command() async def reload(self, ctx,", "VALUES (?, ?)\", (name, url)) await self.bot.sus_users.commit() await cur.close() await", "mystbin_client.post(values) await ctx.send(f\"Traceback: {paste.url}\") @commands.command(brief = \"adds packages and urls", "all in the sus_users list\") async def sus_users(self, ctx): cur", "specify a command\") if not command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That doesn't", "ctx.send(\"You can't use that it's owner only\") def setup(bot): bot.add_cog(Owner(bot))", "if args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except: await", "user: utils.BetterUserconverter = None): user = user or ctx.author pag", "else: try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc()", "also url.\") cur = await 
self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO RTFM_DICTIONARY", "chunk this guild, it appears to be chunked\") await ctx.guild.chunk(cache", "mystbin, typing, aioimgur, functools, tweepy import traceback, textwrap from discord.ext.menus.views", "= \"User ID:\", value = f\"{item}\", inline = False) return", "status, check now....\") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)}", "in pag.pages] pages = pages or [\"No shared servers\"] menu", "chunk_guild(self, ctx): if ctx.guild is None: return await ctx.send(\"You can't", "await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if ctx.command or", "access_secret = os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api = tweepy.API(auth) return twitter_api.update_status(status", "except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"done reloading", "async def remove_tester(self, ctx, *, user: utils.BetterUserconverter = None): if", "None): if await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel): try:", "command is meant to send updates to my webhook\") async", "self.bot.load_extension(cog) except Exception as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Loaded", "ctx.send(\"You need a name and also url.\") cur = await", "self.bot.testers: self.bot.testers.append(user.id) await ctx.send(f\"added tester known as {user}\") else: return", "*, user: utils.BetterUserconverter = None): if user is None: await", "not to have valid perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You can't", "await self.bot.close() async def cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async", "embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if args is", "sus_users list\") async def sus_users(self, ctx): cur = await self.bot.sus_users.cursor()", "?)\", (name, url)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f\"added {name}", "guild is None: return await ctx.send(\"Guild is None can't do", "be unloaded just fine :D.(check any errors)\") if cog is", "*, cog = None): if cog: try: self.bot.unload_extension(cog) except commands.errors.ExtensionError", "= type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages =", "ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if (ctx.author.dm_channel is None):", "e: traceback.print_exc() return await ctx.send(e) await ctx.send(f\"Sucessfully reloaded {value.__name__} \\nMain", "item): embed = discord.Embed(title = \"Testing Users:\", color = random.randint(0,", "resetcooldown(self, ctx, *, command = None): if not command: return", "await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO RTFM_DICTIONARY VALUES (?, ?)\", (name,", "testing guilds only)\") async def chunk_guild(self, ctx): if ctx.guild is", "await ctx.author.send(content=f\"Added text file to mystbin: \\n{paste.url}\") @channel_backup.error async def", "mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await ctx.send(f\"Traceback: {paste.url}\")", "chunk_guild_error(self, ctx, error): await 
ctx.send(error) traceback.print_exc() @commands.command(brief = \"displays the", "= 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True)", "None: return await ctx.send(\"You can't chunk a guild that doesn't", "await ctx.send(f\"Result of the input was {result}\") @commands.command(brief=\"a powerful owner", "guild for the purpose of testing purpose(it's owner only to", "(user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers:", "dates updated.\") async def servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds", "= None): if cog: try: self.bot.load_extension(cog) except Exception as e:", "cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset cooldown of {command_wanted}\") @commands.command(brief = \"leaves", "ctx.send(f\"Url of sent tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks a", "only use when needed or really wanted. Otherwise no thanks.\")", "and then sends it into a file or mystbin\") async", "await self.bot.sus_users.commit() await cur.close() await ctx.send(f\"added {name} and {url} to", "= f\"User ID : {item[0]}\", value = f\"**Reason :** {item[1]}\",", "name = None, *, url = None): if not name", "url)) await self.bot.sus_users.commit() await cur.close() await ctx.send(f\"added {name} and {url}", "await ctx.send(\"can't have a user be none.\") if user: await", "or really wanted. Otherwise no thanks.\") async def leave_guild(self, ctx,", "cogs\") @commands.command() async def shutdown(self, ctx): await ctx.send(\"shutdown/logout time happening.\")", "await ctx.send(\"You need to provide some attachments.\") await ctx.send(\"JDJG doesn't", "is False: await ctx.send(\"You can't use that command\") class ServersEmbed(menus.ListPageSource):", "= discord.Webhook.from_url(os.environ[\"webhook1\"], session = self.bot.session) embed=discord.Embed(title=\"Update\",color=(35056),timestamp=(ctx.message.created_at)) embed.add_field(name=\"Update Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's", "async def addsus(self, ctx, *, user: utils.BetterUserconverter = None): if", "mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None): user = user", "def mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None): user =", "ctx.send(f\"Removed tester known as {user}\") @commands.command(brief=\"A command to add testers\")", "utils.BetterUserconverter = None): user = user or ctx.author pag =", "return await ctx.send(\"Guild is None can't do anything.\") await ctx.send(\"Bot", "activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers | {len(self.bot.users)} users\")) @stats_status.error async def stats_status_error(self,", "self.bot.testers.remove(user.id) await ctx.send(f\"Removed tester known as {user}\") @commands.command(brief=\"A command to", "color = random.randint(0, 16777215)) embed.add_field(name = \"User ID:\", value =", "or not name and not url: return await ctx.send(\"You need", "os.getenv('tweet_key') consumer_secret = os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token =", "ctx, *, args = None): if args is None:await ctx.send(\"Can't", "To:\",value=str(user)) await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *, cog", "g in user.mutual_guilds: pag.add_line(f\"{g}\") 
pages = [page.strip(\"`\") for page in", "= None): cog = cog or \"all\" if cog ==", "1), delete_message_after = True) await menu.start(ctx) @commands.command() async def update_sus(self,", "is a DM.\") if ctx.guild.chunked: return await ctx.send(\"No need to", "self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO testers_list VALUES (?)\", (user.id,)) await self.bot.sus_users.commit()", "await cur.close() await ctx.send(f\"added {name} and {url} to the rtfm", "\"removes packages from the rtfm DB\", aliases = [\"remove_rtfm\"]) async", "not command: return await ctx.send(\"please specificy a command\") command_wanted =", "only) but with join dates updated.\") async def servers2(self, ctx):", "DB\") @commands.command(brief = \"removes packages from the rtfm DB\", aliases", "channel_backup_error(self, ctx, error): etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype,", "traceback.print_exc() await ctx.send(\"Loaded cog(see if there's any errors)\") if cog", "menu.start(ctx, channel = ctx.author.dm_channel) if await self.bot.is_owner(ctx.author) is False: await", "{new_line}Attachments : {msg.attachments}\" if msg.content else f\"{msg.author} ({('Bot' if msg.author.bot", "all cog_command_error @commands.command(brief=\"Changes Bot Status(Owner Only)\") async def status(self ,", "import utils import random , discord, os, importlib, mystbin, typing,", "[\"remove_rtfm\"]) async def removertfm(self, ctx, *, name = None): if", "e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Loaded cog(see if there's any", "if user is None: await ctx.send(\"You can't have a none", "not command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That doesn't have a cooldown/isn't on", "errors)\") else: try: self.bot.reload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e)", "in this guils so, I kept it here.\") webhook =", "= await self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc() return", "utils.BetterUserconverter = None, *, command = None): #make sure to", "return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error): if ctx.command", "if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM testers_list", "command to remove sus users.\") async def removesus(self, ctx, *,", "is None): await ctx.author.create_dm() await menu.start(ctx, channel = ctx.author.dm_channel) @commands.command(brief=\"A", "owner tool to reload local files that aren't reloadable.\") async", "for any errors)\") @commands.command() async def unload(self, ctx, *, cog", "lol)\") async def save_image(self, ctx): if not ctx.message.attachments: return await", "servers(self, ctx): if await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g", "args = None): if await self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel,", "take any responbility for what you upload here :eyes: don't", "ctx.send(\"Appears not to have valid perms\") if isinstance(ctx.channel,discord.DMChannel): await ctx.send(\"You", "to mystbin: \\n{paste.url}\") @channel_backup.error async def channel_backup_error(self, ctx, error): etype", "command\") if not command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That doesn't have a", "page = \"\\n\".join(f\"{msg.author} ({('Bot' if msg.author.bot else 'User')}) : {msg.content}", "True) await ctx.send(\"Finished chunking..\") @chunk_guild.error async def chunk_guild_error(self, ctx, error):", "(?)\", (user.id,)) 
await self.bot.sus_users.commit() await cur.close() if not user.id in", "await ctx.send(\"Finished chunking..\") @chunk_guild.error async def chunk_guild_error(self, ctx, error): await", "@commands.command(brief=\"a command to give a list of servers(owner only)\",help=\"Gives a", "commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog reloaded :D", "await ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def command_bypass(self, ctx ,user: utils.BetterUserconverter =", "async def chunk_guild(self, ctx): if ctx.guild is None: return await", "cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE name", "ctx): messages = await ctx.channel.history(limit = None, oldest_first = True).flatten()", "reload local files that aren't reloadable.\") async def reload_basic(self, ctx,", "ctx.guild is None: return await ctx.send(\"You can't chunk a guild", "rfm value {name}.\") @commands.command(brief = \"a command to save images", "a cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset cooldown of {command_wanted}\") @commands.command(brief =", "chunked\") await ctx.guild.chunk(cache = True) await ctx.send(\"Finished chunking..\") @chunk_guild.error async", "try: value=importlib.reload(module) except Exception as e: traceback.print_exc() return await ctx.send(e)", "ctx.send(f\"added tester known as {user}\") else: return await ctx.send(f\"{user} is", "user: utils.BetterUserconverter = None): if user is None: await ctx.send(\"can't", "self.bot.testers: return await ctx.send(f\"{user} isn't in the testers list.\") else:", "cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM sus_users WHERE user_id", "users.\") class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed=discord.Embed(title =", "ctx): if await self.bot.is_owner(ctx.author): pag = commands.Paginator() for g in", "cooldown of {command_wanted}\") @commands.command(brief = \"leaves a guild only use", "{new_line}Attachments : {msg.attachments}\" for msg in messages) mystbin_client = mystbin.Client(session", "await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE name = ?\",", "channel_backup(self, ctx): messages = await ctx.channel.history(limit = None, oldest_first =", "menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is None):", "be used in testing guilds only)\") async def chunk_guild(self, ctx):", "if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO testers_list", "style = 'd')} {discord.utils.format_dt(g.me.joined_at, style = 'T')} \\n[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`)", "user be none.\") if user: await ctx.reply(\"Please give me a", "#make sure to swap to autoconverter if it gets added.", "can't use that in Dms.\") if await self.bot.is_owner(ctx.author) is False:", "value = f\"**Reason :** {item[1]}\", inline = False) return embed", "me a message to use.\") message = await self.bot.wait_for(\"message\",check =", "await cur.execute(\"SELECT * FROM SUS_USERS;\") sus_users = tuple(await cursor.fetchall()) await", "\"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is None): await user.create_dm() try: await user.send(embed=embed_message)", "traceback.print_exc() @commands.command() async def aioinput_test(self, ctx, *, args = None):", "ctx.message.attachments: return await ctx.send(\"You need to provide some 
attachments.\") await", "traceback.print_exc() await ctx.send(\"Cog reloaded :D (check for any errors)\") @commands.command()", "cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE FROM testers_list WHERE user_id", "ctx.send(\"You sadly can't use it like that.\") if await self.bot.is_owner(ctx.author)", "what you upload here :eyes: don't upload anything bad okay?\")", "= await imgur_client.upload(await x.read()) await ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A command to remove", "Info:\",value=args) embed.set_author(name=\"<NAME>\",icon_url='https://i.imgur.com/pdQkCBv.png') embed.set_footer(text=\"JDJG's Updates\") await webhook.send(embed=embed) if args is None:", "commands, menus import utils import random , discord, os, importlib,", ": {msg.attachments}\" for msg in messages) mystbin_client = mystbin.Client(session =", "fine :D.(check any errors)\") if cog is None: await ctx.send(\"you", "file or mystbin\") async def channel_backup(self, ctx): messages = await", "ctx.send(\"changing status, check now....\") await self.bot.change_presence(status=discord.Status.online, activity=discord.Activity(type=discord.ActivityType.watching, name=f\"{len(self.bot.guilds)} servers |", "here :eyes: don't upload anything bad okay?\") for x in", "None): if name is None: return await ctx.send(\"You can't remove", "True) if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel", "utils.check(ctx)) cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO sus_users VALUES", "add sus_users with a reason\") async def addsus(self, ctx, *,", "for the {user} for one command usage!\") self.bot.special_access[user.id]=command_wanted.name if command_wanted", "ServersEmbed(menus.ListPageSource): async def format_page(self, menu, item): embed = discord.Embed(title=\"Servers:\",description=item,color=random.randint(0, 16777215))", "is in the testers list already!\") def tweepy_post(self, post_text =", "any errors)\") if cog is None: await ctx.send(\"you can't ask", "ctx, *, args = None): args = args or \"Test\"", "commands.Paginator() for g in sorted_guilds: pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at, style = 'd')} {discord.utils.format_dt(g.me.joined_at,", "appears to be chunked\") await ctx.guild.chunk(cache = True) await ctx.send(\"Finished", "user or ctx.author pag = commands.Paginator() for g in user.mutual_guilds:", "@commands.command() async def unload(self, ctx, *, cog = None): if", "?\", (user.id,)) await self.bot.sus_users.commit() await cur.close() if not user.id in", ":D (check for any errors)\") @commands.command() async def unload(self, ctx,", "await self.bot.get_channel(855217084710912050).send(embed=embed_message) @commands.command() async def load(self, ctx, *, cog =", "post_text) @commands.command(brief = \"sends tweet to JDBot Twitter\") async def", "*, args = None): args = args or \"Test\" result=await", "no cogs\") @commands.command() async def shutdown(self, ctx): await ctx.send(\"shutdown/logout time", "as {user}\") else: return await ctx.send(f\"{user} is in the testers", "works with JDJG, but this command is meant to send", "discord, os, importlib, mystbin, typing, aioimgur, functools, tweepy import traceback,", "can't use it like that.\") if await self.bot.is_owner(ctx.author) is False:", "format_page(self, menu, item): embed=discord.Embed(title = \"Users Deemed Suspicious by JDJG", "ask to reload no cogs\") @commands.command() async def shutdown(self, ctx):", "oldest_first = True).flatten() 
new_line = \"\\n\" page = \"\\n\".join(f\"{msg.author} ({('Bot'", "in pag.pages] menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None):", "guild, it appears to be chunked\") await ctx.guild.chunk(cache = True)", "guild status and user status immediately\") async def stats_status(self, ctx):", "as e: await ctx.send(e) traceback.print_exc() await ctx.send(\"Cog should be unloaded", "await self.bot.sus_users.commit() await cur.close() await ctx.send(\"Removed sus users.\") class SusUsersEmbed(menus.ListPageSource):", "just fine :D.(check any errors)\") if cog is None: await", "ctx.author if user: await ctx.reply(\"Please give me a message to", "= None): if user is None: await ctx.send(\"can't have a", "{command_wanted}\") @commands.command(brief = \"leaves a guild only use when needed", "self.bot.sus_users.cursor() cursor = await cur.execute(\"SELECT * FROM SUS_USERS;\") sus_users =", "if not command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That doesn't have a cooldown/isn't", "Inc. Official\", color = random.randint(0, 16777215)) embed.add_field(name = f\"User ID", "None): cog = cog or \"all\" if cog == \"all\":", "user = ctx.author if user: await ctx.reply(\"Please give me a", "= [\"reset_cooldown\"]) async def resetcooldown(self, ctx, *, command = None):", "to use.\") message = await self.bot.wait_for(\"message\",check = utils.check(ctx)) embed_message =", "None): if cog: try: self.bot.load_extension(cog) except Exception as e: await", "ctx, *, args = None): if not args: return await", "webhook.send(embed=embed) if args is None: await ctx.send(\"You sadly can't use", "try: await ctx.guild.me.edit(nick=name) except discord.Forbidden: await ctx.send(\"Appears not to have", "is None: return await ctx.send(\"You can't remove None\") cur =", "args: try: module = importlib.import_module(name=args) except Exception as e: traceback.print_exc()", "embed=discord.Embed(title = \"Users Deemed Suspicious by JDJG Inc. 
Official\", color", "should be unloaded just fine :D.(check any errors)\") if cog", "= ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after = True) await menu.start(ctx)", "None: await ctx.send(\"You can't have a non existent user.\") if", "self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id) await", "= user or ctx.author pag = commands.Paginator() for g in", "await cur.execute(\"DELETE FROM RTFM_DICTIONARY WHERE name = ?\", (name,)) await", "if there's any errors)\") if cog is None: await ctx.send(\"you", "inline = False) return embed @commands.command(brief=\"a command to grab all", "= f\"{item}\", inline = False) return embed @commands.command(brief = \"a", "list already!\") def tweepy_post(self, post_text = None): consumer_key = os.getenv('tweet_key')", "if (ctx.author.dm_channel is None): await ctx.author.create_dm() await menu.start(ctx, channel =", "return embed @commands.command(brief=\"a command to grab all in the sus_users", "url.\") cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO RTFM_DICTIONARY VALUES", "pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at, style = 'd')} {discord.utils.format_dt(g.me.joined_at, style = 'T')} \\n[{len(g.members)}/{g.member_count}] **{g.name}**", "await ctx.send(f\"added tester known as {user}\") else: return await ctx.send(f\"{user}", "to see what guilds a person is in.\") async def", "traceback.print_exc() return await ctx.send(f\"Exception occured at {e}\") await ctx.send(f\"Url of", "cursor.fetchall()) await cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await", "await ctx.send(\"You can't remove None\") cur = await self.bot.sus_users.cursor() await", "= tuple(await cursor.fetchall()) await cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users,", "use it like that.\") if await self.bot.is_owner(ctx.author) is False: await", "ctx.message.delete() except: await ctx.send(\"It couldn't delete the message in this", "user.create_dm() try: await user.send(embed=embed_message) except: user = ctx.author await user.send(content=\"Message", "command_wanted.is_on_cooldown(ctx): return await ctx.send(\"That doesn't have a cooldown/isn't on a", "return await ctx.send(f\"{user} is in the testers list already!\") def", "{user}\") @commands.command(brief=\"A command to add testers\") async def add_tester(self, ctx,", "a user be none.\") if user: await ctx.reply(\"Please give me", "menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after = True) if (ctx.author.dm_channel", "trace))) pages = textwrap.wrap(values, width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages,", "= os.getenv('tweet_secret') auth = tweepy.OAuthHandler(consumer_key, consumer_secret) access_token = os.getenv('tweet_access') access_secret", "this command is meant to send updates to my webhook\")", "await ctx.send(\"done reloading all cogs(check for any errors)\") else: try:", "ctx.send(\"Cog should be unloaded just fine :D.(check any errors)\") if", "error): await ctx.send(error) traceback.print_exc() @commands.command(brief = \"displays the guild status", "FROM RTFM_DICTIONARY WHERE name = ?\", (name,)) await self.bot.sus_users.commit() await", "menu = ViewMenuPages(self.ServersEmbed(pages, per_page=1),delete_message_after=True) if (ctx.author.dm_channel is None): await ctx.author.create_dm()", "*, cog = None): if 
cog: try: self.bot.load_extension(cog) except Exception", "error): await ctx.send(error) class TestersEmbed(menus.ListPageSource): async def format_page(self, menu, item):", "if command: command_wanted=self.bot.get_command(command) if command_wanted: await ctx.send(f\"{command_wanted.name} now accessible for", "user status immediately\") async def stats_status(self, ctx): await ctx.send(\"changing status,", "await ctx.send(e) traceback.print_exc() await ctx.send(\"Loaded cog(see if there's any errors)\")", "cur.close() await self.bot.sus_users.commit() menu = ViewMenuPages(self.SusUsersEmbed(sus_users, per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error", "= None): if user is None: await ctx.send(\"You can't have", "mystbin: \\n{paste.url}\") @channel_backup.error async def channel_backup_error(self, ctx, error): etype =", "ctx): if ctx.guild is None: return await ctx.send(\"You can't chunk", "if args: try: module = importlib.import_module(name=args) except Exception as e:", "or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\") for page in pag.pages] menu", "False: await ctx.send(\"You can't use that it's owner only\") def", "self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that it's owner", "pag = commands.Paginator() for g in user.mutual_guilds: pag.add_line(f\"{g}\") pages =", "item): embed=discord.Embed(title = \"Users Deemed Suspicious by JDJG Inc. Official\",", "await cur.execute(\"DELETE FROM testers_list WHERE user_id = ?\", (user.id,)) await", "f\"{item}\", inline = False) return embed @commands.command(brief = \"a command", "if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that\")", "is meant to send updates to my webhook\") async def", "known as {user}\") @commands.command(brief=\"A command to add testers\") async def", "x in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e:", "await ctx.send(\"That's an owner only command\") @commands.command(brief=\"Only owner command to", "await self.bot.sus_users.commit() await cur.close() if not user.id in self.bot.testers: self.bot.testers.append(user.id)", "no thanks.\") async def leave_guild(self, ctx, *, guild: typing.Optional[discord.Guild] =", "mystbin_client = mystbin.Client(session = self.bot.session) paste = await mystbin_client.post(page) await", "\\nMain Package: {value.__package__}\") @commands.command(brief=\"backs up a channel and then sends", "async def send_tweet(self, ctx, *, args = None): if not", "async def mutualguilds(self, ctx, *, user: utils.BetterUserconverter = None): user", "None\") if args: try: module = importlib.import_module(name=args) except Exception as", "try: self.bot.load_extension(cog) except Exception as e: await ctx.send(e) traceback.print_exc() await", "await mystbin_client.post(page) await ctx.author.send(content=f\"Added text file to mystbin: \\n{paste.url}\") @channel_backup.error", "await ctx.send(error) traceback.print_exc() #I need to fix all cog_command_error @commands.command(brief=\"Changes", "= None): guild = guild or ctx.guild if guild is", "accessible for the {user} for one command usage!\") self.bot.special_access[user.id]=command_wanted.name if", "self.bot.session) paste = await mystbin_client.post(page) await ctx.author.send(content=f\"Added text file to", "os.getenv('tweet_token') auth.set_access_token(access_token, access_secret) twitter_api = tweepy.API(auth) return twitter_api.update_status(status = post_text)", "error): await 
ctx.send(error) @commands.command(brief=\"a command to give a list of", "command to grab all in the sus_users list\") async def", "of {command_wanted}\") @commands.command(brief = \"leaves a guild only use when", "await menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx, error): await ctx.send(error)", "leaving guild :(\") try: await guild.leave() except Exception as e:", "that.\") if await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use", "have a user be none.\") if user: await ctx.reply(\"Please give", "ctx.command or not ctx.command.has_error_handler(): await ctx.send(error) traceback.print_exc() #I need to", "as {user}\") @commands.command(brief=\"A command to add testers\") async def add_tester(self,", "is None: await ctx.send(\"select a command :(\") @commands.command(brief = \"resets", "= cog or \"all\" if cog == \"all\": for x", "async def update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def", "to twitter.\") try: tweet_time = functools.partial(self.tweepy_post, args) post = await", "ctx.send(\"That doesn't have a cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx) await", "can't ask to load no cogs.\") @commands.command() async def reload(self,", "for the purpose of testing purpose(it's owner only to be", "async def format_page(self, menu, item): embed=discord.Embed(title = \"Users Deemed Suspicious", "await ctx.send(\"Removed sus users.\") class SusUsersEmbed(menus.ListPageSource): async def format_page(self, menu,", "async def stats_status(self, ctx): await ctx.send(\"changing status, check now....\") await", "async def aioinput_test(self, ctx, *, args = None): args =", "ctx.send(\"You can't use that\") @commands.command(brief=\"Commands to see what guilds a", "= \"chunks a guild for the purpose of testing purpose(it's", "ctx.send(\"It couldn't delete the message in this guils so, I", "await self.bot.is_owner(ctx.author) is False: await ctx.send(\"You can't use that\") @commands.command(brief=\"Commands", "have a none user.\") if user: cur = await self.bot.sus_users.cursor()", "self.bot.sus_users.commit() await cur.close() await ctx.send(\"Removed sus users.\") class SusUsersEmbed(menus.ListPageSource): async", "join dates updated.\") async def servers2(self, ctx): if await self.bot.is_owner(ctx.author):", "return await ctx.send(\"You need to provide some attachments.\") await ctx.send(\"JDJG", "= await mystbin_client.post(values) await ctx.send(f\"Traceback: {paste.url}\") @commands.command(brief = \"adds packages", "item): embed = discord.Embed(title=\"Servers:\",description=item,color=random.randint(0, 16777215)) return embed @commands.command(brief=\"a command to", "images to imgur(for owner only lol)\") async def save_image(self, ctx):", "try: self.bot.unload_extension(cog) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await", "ctx.send(error) @commands.command(aliases=[\"bypass_command\"]) async def command_bypass(self, ctx ,user: utils.BetterUserconverter = None,", "save_image(self, ctx): if not ctx.message.attachments: return await ctx.send(\"You need to", "what guilds a person is in.\") async def mutualguilds(self, ctx,", "need a name and also url.\") cur = await self.bot.sus_users.cursor()", "await ctx.send(\"please specify a command\") if not command_wanted.is_on_cooldown(ctx): return await", "testers_list WHERE user_id = ?\", (user.id,)) await self.bot.sus_users.commit() await cur.close()", "await 
ctx.send(f\"{user} is in the testers list already!\") def tweepy_post(self,", "await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at) pag =", "servers2(self, ctx): if await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild:", "({('Bot' if msg.author.bot else 'User')}) : {new_line.join(f'{e.to_dict()}' for e in", "\"all\" if cog == \"all\": for x in list(self.bot.extensions): try:", "messages = await ctx.channel.history(limit = None, oldest_first = True).flatten() new_line", "cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO RTFM_DICTIONARY VALUES (?,", "menus import utils import random , discord, os, importlib, mystbin,", "cooldown/isn't on a cooldown.\") command_wanted.reset_cooldown(ctx) await ctx.send(f\"reset cooldown of {command_wanted}\")", "await ctx.send(\"Updated SQL boss.\") @update_sus.error async def update_sus_error(self, ctx, error):", "* , args=None): if await self.bot.is_owner(ctx.author): if args: await self.bot.change_presence(status=discord.Status.do_not_disturb,", "is False: await ctx.send(\"You can't use that it's owner only\")", "not url: return await ctx.send(\"You need a name and also", "ctx.send(f\"Somehow an error occured: {e}\") traceback.print_exc() @commands.command() async def aioinput_test(self,", "ctx.send(e) traceback.print_exc() await ctx.send(\"done reloading all cogs(check for any errors)\")", "input, (f\"{args}:\")) await ctx.send(f\"Result of the input was {result}\") @commands.command(brief=\"a", "= None, *, url = None): if not name or", "per_page=1),delete_message_after=True) await menu.start(ctx) @sus_users.error async def sus_users_error(self, ctx, error): await", "and also url.\") cur = await self.bot.sus_users.cursor() await cur.execute(\"INSERT INTO", "servers\"] menu = ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is", "def format_page(self, menu, item): embed = discord.Embed(title = \"Testing Users:\",", "pag.add_line(f\"[{len(g.members)}/{g.member_count}] **{g.name}** (`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\") pages = [page.strip(\"`\")", "to my webhook\") async def webhook_update(self, ctx, *, args =", "is None can't do anything.\") await ctx.send(\"Bot leaving guild :(\")", "ViewMenuPages(utils.mutualGuildsEmbed(pages, per_page=1),delete_message_after = True) if (ctx.author.dm_channel is None): await ctx.author.create_dm()", "SQL boss.\") @update_sus.error async def update_sus_error(self, ctx, error): await ctx.send(error)", "async def removertfm(self, ctx, *, name = None): if name", "embed @commands.command(brief=\"a command to grab all in the sus_users list\")", "ctx.send(\"You can't use that it's owner only\") @commands.command(brief=\"only works with", "cog_check(self, ctx): return await self.bot.is_owner(ctx.author) async def cog_command_error(self, ctx, error):", "@commands.command(brief = \"chunks a guild for the purpose of testing", "ctx ,*, name=None): if await self.bot.is_owner(ctx.author): if isinstance(ctx.channel, discord.TextChannel): await", "(`{g.id}`) | {(g.system_channel or g.text_channels[0]).mention}\") pages = [page.strip(\"`\") for page", "discord.TextChannel): try: await ctx.message.delete() except: await ctx.send(\"It couldn't delete the", "cogs.\") @commands.command() async def reload(self, ctx, *, cog = None):", "ctx.send(\"JDJG doesn't take any responbility for what you upload here", "?\", (name,)) await 
self.bot.sus_users.commit() await cur.close() await ctx.send(f\"Removed the rfm", "@channel_backup.error async def channel_backup_error(self, ctx, error): etype = type(error) trace", "WHERE user_id = ?\", (user.id,)) await self.bot.sus_users.commit() await cur.close() await", "rtfm DB\", aliases = [\"remove_rtfm\"]) async def removertfm(self, ctx, *,", "paste = await mystbin_client.post(values) await ctx.send(f\"Traceback: {paste.url}\") @commands.command(brief = \"adds", "as e: traceback.print_exc() return await ctx.send(e) imgur_client= aioimgur.ImgurClient(os.environ[\"imgur_id\"], os.environ[\"imgur_secret\"]) imgur_url", "= \"Users Deemed Suspicious by JDJG Inc. Official\", color =", "self.bot = bot @commands.command(brief=\"a command to send mail\") async def", "await self.bot.sus_users.cursor() cursor = await cur.execute(\"SELECT * FROM SUS_USERS;\") sus_users", "cur.execute(\"DELETE FROM testers_list WHERE user_id = ?\", (user.id,)) await self.bot.sus_users.commit()", "if user is None: await ctx.send(\"can't have a user be", "await cur.close() if not user.id in self.bot.testers: return await ctx.send(f\"{user}", "none user.\") if user: cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE", "embed_message.set_thumbnail(url = \"https://i.imgur.com/1XvDnqC.png\") if (user.dm_channel is None): await user.create_dm() try:", "try: self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await ctx.send(e) traceback.print_exc() await", "False: await ctx.send(\"You can't use that it's owner only\") @commands.command(brief=\"only", "pages = [page.strip(\"`\") for page in pag.pages] pages = pages", "to remove sus users.\") async def removesus(self, ctx, *, user:", "width = 1992) menu = ViewMenuPages(utils.ErrorEmbed(pages, per_page = 1),delete_message_after =", "some attachments.\") await ctx.send(\"JDJG doesn't take any responbility for what", ":eyes: don't upload anything bad okay?\") for x in ctx.message.attachments:", "| {(g.system_channel or g.text_channels[0]).mention}\\n\") pages = [page.strip(\"`\") for page in", "ctx): await self.bot.sus_users.commit() await ctx.send(\"Updated SQL boss.\") @update_sus.error async def", "@commands.command() async def update_sus(self, ctx): await self.bot.sus_users.commit() await ctx.send(\"Updated SQL", "in ctx.message.attachments: try: discord.utils._get_mime_type_for_image(await x.read()) except Exception as e: traceback.print_exc()", "tweet is: https://twitter.com/twitter/statuses/{post.id}\") @commands.command(brief = \"chunks a guild for the", "list of servers(owner only)\",help=\"Gives a list of guilds(Bot Owners only)", "for e in msg.embeds)} {new_line}Attachments : {msg.attachments}\" for msg in", "etype = type(error) trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages", "@stats_status.error async def stats_status_error(self, ctx, error): await ctx.send(error) @commands.command(brief=\"a command", "return await ctx.send(\"please specify a command\") if not command_wanted.is_on_cooldown(ctx): return", "if args: await self.bot.change_presence(status=discord.Status.do_not_disturb, activity= discord.Activity(type=discord.ActivityType.watching,name=args)) if args is None:", "None): if not command: return await ctx.send(\"please specificy a command\")", "testers(self, ctx): menu = ViewMenuPages(self.TestersEmbed(self.bot.testers, per_page = 1), delete_message_after =", "utils.BetterUserconverter = None): if user is None: await ctx.reply(\"User not", 
"@commands.command(brief = \"sends tweet to JDBot Twitter\") async def send_tweet(self,", "def shutdown(self, ctx): await ctx.send(\"shutdown/logout time happening.\") await self.bot.close() async", "not args: return await ctx.send(\"you can't send nothing to twitter.\")", "ctx): if await self.bot.is_owner(ctx.author): sorted_guilds = sorted(self.bot.guilds, key=lambda guild: guild.me.joined_at)", "msg.embeds)} {new_line}Attachments : {msg.attachments}\" for msg in messages) mystbin_client =", "ctx.send(f\"Exception occured at {e}\") await ctx.send(f\"Url of sent tweet is:", "values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width = 1992) menu", "ctx.send(\"You can't have a non existent user.\") if user: cur", "to send updates to my webhook\") async def webhook_update(self, ctx,", "self.bot.sus_users.commit() await ctx.send(\"Updated SQL boss.\") @update_sus.error async def update_sus_error(self, ctx,", "return await ctx.send(f\"Exception occured at {e}\") await ctx.send(f\"Url of sent", "isn't in the testers list.\") else: self.bot.testers.remove(user.id) await ctx.send(f\"Removed tester", "do anything.\") await ctx.send(\"Bot leaving guild :(\") try: await guild.leave()", "pag.add_line(f\"{g}\") pages = [page.strip(\"`\") for page in pag.pages] pages =", "self.bot.loop.run_in_executor(None, tweet_time) except Exception as e: traceback.print_exc() return await ctx.send(f\"Exception", "user is None: await ctx.send(\"can't have a user be none.\")", "to be chunked\") await ctx.guild.chunk(cache = True) await ctx.send(\"Finished chunking..\")", "of the input was {result}\") @commands.command(brief=\"a powerful owner tool to", "ctx.author.dm_channel) mystbin_client = mystbin.Client(session=self.bot.session) paste = await mystbin_client.post(values) await ctx.send(f\"Traceback:", "boss.\") @update_sus.error async def update_sus_error(self, ctx, error): await ctx.send(error) @commands.command(aliases=[\"bypass_command\"])", "imgur_url = await imgur_client.upload(await x.read()) await ctx.send(f\"{imgur_url['link']}\") @commands.command(brief=\"A command to", "e: await ctx.send(e) traceback.print_exc() await ctx.send(\"done reloading all cogs(check for", "in list(self.bot.extensions): try: self.bot.reload_extension(x) except commands.errors.ExtensionError as e: await ctx.send(e)", "= None, *, command = None): #make sure to swap", "ctx.send(\"can't have a user be none.\") if user: await ctx.reply(\"Please", "async def format_page(self, menu, item): embed = discord.Embed(title = \"Testing", "ctx.send(\"You can't remove None\") cur = await self.bot.sus_users.cursor() await cur.execute(\"DELETE", "= error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width =", "me a reason why:\") reason = await self.bot.wait_for(\"message\",check= utils.check(ctx)) cur", "the input was {result}\") @commands.command(brief=\"a powerful owner tool to reload", "self.bot.is_owner(ctx.author): if args: if isinstance(ctx.channel, discord.TextChannel): try: await ctx.message.delete() except:", ": {msg.attachments}\" if msg.content else f\"{msg.author} ({('Bot' if msg.author.bot else", "trace = error.__traceback__ values=''.join(map(str,traceback.format_exception(etype, error, trace))) pages = textwrap.wrap(values, width", "= commands.Paginator() for g in sorted_guilds: pag.add_line(f\"{discord.utils.format_dt(g.me.joined_at, style = 'd')}" ]
[ "z.conjugate()), # or functions that are simply not defined. #", "re = Function(name = 're', arguments = ('z',), expression =", "= Function(name = 'csc', arguments = ('z',), expression = '1./cmath.sin(z)')", "arguments = ('z',), expression = '1./cmath.sin(z)') acsc = Function(name =", "'re', arguments = ('z',), expression = 'z.real') im = Function(name", "extensions of the cmath library, and correspond # either to", "are in cmath, but inconvenient # to access from there", "cmath # complexconjugate = Function(name = 'complexconjugate', arguments = ('z',),", "'im', arguments = ('z',), expression = 'z.imag') # New functions", "This file is part of the UFO. # # This", "arguments = ('z',), expression = 'z.imag') # New functions (trigonometric)", "= ('z',), expression = '1./cmath.sin(z)') acsc = Function(name = 'acsc',", "that are simply not defined. # # from __future__ import", "for functions from cmath # complexconjugate = Function(name = 'complexconjugate',", "cmath, but inconvenient # to access from there (e.g. z.conjugate()),", "# This file is part of the UFO. # #", "library, and correspond # either to functions that are in", "('z',), expression = '1./cmath.sin(z)') acsc = Function(name = 'acsc', arguments", "defined. # # from __future__ import absolute_import __date__ = \"22", "contains definitions for functions that # are extensions of the", "definitions for functions that # are extensions of the cmath", "'sec', arguments = ('z',), expression = '1./cmath.cos(z)') asec = Function(name", "complexconjugate = Function(name = 'complexconjugate', arguments = ('z',), expression =", "# complexconjugate = Function(name = 'complexconjugate', arguments = ('z',), expression", "= 'z.imag') # New functions (trigonometric) sec = Function(name =", "file is part of the UFO. # # This file", "cmath from .object_library import all_functions, Function # # shortcuts for", ".object_library import all_functions, Function # # shortcuts for functions from", "correspond # either to functions that are in cmath, but", "of the cmath library, and correspond # either to functions", "# shortcuts for functions from cmath # complexconjugate = Function(name", "= 'sec', arguments = ('z',), expression = '1./cmath.cos(z)') asec =", "# either to functions that are in cmath, but inconvenient", "expression = 'z.imag') # New functions (trigonometric) sec = Function(name", "arguments = ('z',), expression = 'cmath.acos(1./z)') csc = Function(name =", "'complexconjugate', arguments = ('z',), expression = 'z.conjugate()') re = Function(name", "not defined. # # from __future__ import absolute_import __date__ =", "# # shortcuts for functions from cmath # complexconjugate =", "functions (trigonometric) sec = Function(name = 'sec', arguments = ('z',),", "but inconvenient # to access from there (e.g. z.conjugate()), #", "there (e.g. 
# This file is part of the UFO.
#
# This file contains definitions for functions that
# are extensions of the cmath library, and correspond
# either to functions that are in cmath, but inconvenient
# to access from there (e.g. z.conjugate()),
# or functions that are simply not defined.
#
#
from __future__ import absolute_import
__date__ = "22 July 2010"
__author__ = "<EMAIL>"

import cmath
from .object_library import all_functions, Function

#
# shortcuts for functions from cmath
#

complexconjugate = Function(name = 'complexconjugate',
                            arguments = ('z',),
                            expression = 'z.conjugate()')

re = Function(name = 're',
              arguments = ('z',),
              expression = 'z.real')

im = Function(name = 'im',
              arguments = ('z',),
              expression = 'z.imag')

# New functions (trigonometric)

sec = Function(name = 'sec',
               arguments = ('z',),
               expression = '1./cmath.cos(z)')

asec = Function(name = 'asec',
                arguments = ('z',),
                expression = 'cmath.acos(1./z)')

csc = Function(name = 'csc',
               arguments = ('z',),
               expression = '1./cmath.sin(z)')

acsc = Function(name = 'acsc',
                arguments = ('z',),
                expression = 'cmath.asin(1./z)')
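# ----------------------------------------------------------------------
# Editor's sketch (not part of the UFO file above): one way these
# expression strings can be turned into callables. The helper name
# `make_callable` is hypothetical; only `cmath` and the expression
# strings themselves come from the file.
import cmath

def make_callable(arguments, expression):
    # eval() the expression string with the argument names bound;
    # 'cmath' must be visible in the evaluation namespace.
    return lambda *args: eval(expression, {'cmath': cmath},
                              dict(zip(arguments, args)))

sec_fn = make_callable(('z',), '1./cmath.cos(z)')
print(sec_fn(0.5))   # ~ (1.139493927+0j), i.e. 1/cos(0.5)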
[ "= current_version if server_versions is not None: cli_config.server_versions = server_versions", "# # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law", "count = cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY: return True", "from distutils.version import LooseVersion # pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager", "# pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig", "# # Licensed under the Apache License, Version 2.0 (the", "compliance with the License. # You may obtain a copy", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "2.0 (the \"License\"); # you may not use this file", "agreed to in writing, software # distributed under the License", "# Copyright 2019 Polyaxon, Inc. # # Licensed under the", "file except in compliance with the License. # You may", "Unless required by applicable law or agreed to in writing,", "on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS", "if count > cls.FREQUENCY: return True config = cls.get_config_or_default() if", "or config.min_version is None: return True return LooseVersion(config.current_version) < LooseVersion(config.min_version)", "cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY: return True config =", "None: cli_config.current_version = current_version if server_versions is not None: cli_config.server_versions", "distributed under the License is distributed on an \"AS IS\"", "coding: utf-8 from __future__ import absolute_import, division, print_function from distutils.version", "from __future__ import absolute_import, division, print_function from distutils.version import LooseVersion", "\"\"\"Manages access cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME", "log_handler]): return cli_config = cls.get_config_or_default() if check_count is not None:", "config = cls.get_config_or_default() if config.current_version is None or config.min_version is", "should_check(cls): count = cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY: return", "log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls): count = cls._get_count()", "the specific language governing permissions and # limitations under the", "@classmethod def should_check(cls): count = cls._get_count() cls.reset(check_count=count) if count >", "# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express", "import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration .polyaxoncli file.\"\"\"", "1 @classmethod def reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ):", "cls.reset(check_count=count) if count > cls.FREQUENCY: return True config = cls.get_config_or_default()", "express or implied. # See the License for the specific", "applicable law or agreed to in writing, software # distributed", "except in compliance with the License. 
# You may obtain", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "cli_config.server_versions = server_versions if log_handler is not None: cli_config.log_handler =", "is not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod", "Licensed under the Apache License, Version 2.0 (the \"License\"); #", "not use this file except in compliance with the License.", "cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls): count", "writing, software # distributed under the License is distributed on", "in writing, software # distributed under the License is distributed", "if log_handler is not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return", "3 @classmethod def _get_count(cls): config = cls.get_config_or_default() return config.check_count +", "FREQUENCY = 3 @classmethod def _get_count(cls): config = cls.get_config_or_default() return", "you may not use this file except in compliance with", "return config.check_count + 1 @classmethod def reset( cls, check_count=None, current_version=None,", "cls.get_config_or_default() if config.current_version is None or config.min_version is None: return", "if server_versions is not None: cli_config.server_versions = server_versions if log_handler", "= check_count if current_version is not None: cli_config.current_version = current_version", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "check_count is not None: cli_config.check_count = check_count if current_version is", "import LooseVersion # pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration", "import absolute_import, division, print_function from distutils.version import LooseVersion # pylint:disable=import-error", "print_function from distutils.version import LooseVersion # pylint:disable=import-error from polyaxon.managers.base import", "not None: cli_config.current_version = current_version if server_versions is not None:", "CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL = True", "use this file except in compliance with the License. #", "_get_count(cls): config = cls.get_config_or_default() return config.check_count + 1 @classmethod def", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "= cls.get_config_or_default() return config.check_count + 1 @classmethod def reset( cls,", "polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration .polyaxoncli", "+ 1 @classmethod def reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None,", "= cls.get_config_or_default() if check_count is not None: cli_config.check_count = check_count", "polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages", "# coding: utf-8 from __future__ import absolute_import, division, print_function from", "CONDITIONS OF ANY KIND, either express or implied. 
# See", "cli_config = cls.get_config_or_default() if check_count is not None: cli_config.check_count =", "the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required", "= server_versions if log_handler is not None: cli_config.log_handler = log_handler", "or implied. # See the License for the specific language", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "log_handler is not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config", "= True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY =", "License. # You may obtain a copy of the License", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "License, Version 2.0 (the \"License\"); # you may not use", "# limitations under the License. # coding: utf-8 from __future__", "pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class", "# You may obtain a copy of the License at", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "server_versions, log_handler]): return cli_config = cls.get_config_or_default() if check_count is not", "cli_config.current_version = current_version if server_versions is not None: cli_config.server_versions =", "is not None: cli_config.server_versions = server_versions if log_handler is not", "None or config.min_version is None: return True return LooseVersion(config.current_version) <", "CONFIG = CliConfigurationConfig FREQUENCY = 3 @classmethod def _get_count(cls): config", "CliConfigurationConfig FREQUENCY = 3 @classmethod def _get_count(cls): config = cls.get_config_or_default()", "under the License is distributed on an \"AS IS\" BASIS,", "True config = cls.get_config_or_default() if config.current_version is None or config.min_version", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "governing permissions and # limitations under the License. # coding:", "License for the specific language governing permissions and # limitations", "\".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY = 3 @classmethod def _get_count(cls):", "CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY = 3 @classmethod", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "def reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ): if not", "cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ): if not any([check_count, current_version,", "None: cli_config.check_count = check_count if current_version is not None: cli_config.current_version", "Polyaxon, Inc. # # Licensed under the Apache License, Version", "#!/usr/bin/python # # Copyright 2019 Polyaxon, Inc. # # Licensed", "config = cls.get_config_or_default() return config.check_count + 1 @classmethod def reset(", "server_versions if log_handler is not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config)", "is not None: cli_config.current_version = current_version if server_versions is not", "<gh_stars>0 #!/usr/bin/python # # Copyright 2019 Polyaxon, Inc. # #", "# # Copyright 2019 Polyaxon, Inc. 
# # Licensed under", "if not any([check_count, current_version, server_versions, log_handler]): return cli_config = cls.get_config_or_default()", "the License for the specific language governing permissions and #", "permissions and # limitations under the License. # coding: utf-8", "import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access", "(the \"License\"); # you may not use this file except", "Apache License, Version 2.0 (the \"License\"); # you may not", "# you may not use this file except in compliance", "cls.get_config_or_default() return config.check_count + 1 @classmethod def reset( cls, check_count=None,", "server_versions=None, log_handler=None, ): if not any([check_count, current_version, server_versions, log_handler]): return", "either express or implied. # See the License for the", "cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME = \".polyaxoncli\"", "None: cli_config.server_versions = server_versions if log_handler is not None: cli_config.log_handler", "= cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY: return True config", "OR CONDITIONS OF ANY KIND, either express or implied. #", "if config.current_version is None or config.min_version is None: return True", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "log_handler=None, ): if not any([check_count, current_version, server_versions, log_handler]): return cli_config", "division, print_function from distutils.version import LooseVersion # pylint:disable=import-error from polyaxon.managers.base", "the License is distributed on an \"AS IS\" BASIS, #", "not None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def", "cli_config @classmethod def should_check(cls): count = cls._get_count() cls.reset(check_count=count) if count", "in compliance with the License. 
# You may obtain a", "class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL =", "any([check_count, current_version, server_versions, log_handler]): return cli_config = cls.get_config_or_default() if check_count", "software # distributed under the License is distributed on an", "is None or config.min_version is None: return True return LooseVersion(config.current_version)", "return cli_config @classmethod def should_check(cls): count = cls._get_count() cls.reset(check_count=count) if", "absolute_import, division, print_function from distutils.version import LooseVersion # pylint:disable=import-error from", "= log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls): count =", "# # Unless required by applicable law or agreed to", "BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli", "current_version, server_versions, log_handler]): return cli_config = cls.get_config_or_default() if check_count is", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "cls.get_config_or_default() if check_count is not None: cli_config.check_count = check_count if", ".polyaxoncli file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG =", "Version 2.0 (the \"License\"); # you may not use this", "server_versions is not None: cli_config.server_versions = server_versions if log_handler is", "file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG = CliConfigurationConfig", "__future__ import absolute_import, division, print_function from distutils.version import LooseVersion #", "law or agreed to in writing, software # distributed under", "is not None: cli_config.check_count = check_count if current_version is not", "cli_config.check_count = check_count if current_version is not None: cli_config.current_version =", "current_version if server_versions is not None: cli_config.server_versions = server_versions if", "None: cli_config.log_handler = log_handler CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls):", "count > cls.FREQUENCY: return True config = cls.get_config_or_default() if config.current_version", "return True config = cls.get_config_or_default() if config.current_version is None or", "from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration", "def should_check(cls): count = cls._get_count() cls.reset(check_count=count) if count > cls.FREQUENCY:", "access cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME =", "License. # coding: utf-8 from __future__ import absolute_import, division, print_function", "implied. # See the License for the specific language governing", "def _get_count(cls): config = cls.get_config_or_default() return config.check_count + 1 @classmethod", "under the Apache License, Version 2.0 (the \"License\"); # you", "2019 Polyaxon, Inc. # # Licensed under the Apache License,", "\"License\"); # you may not use this file except in", "@classmethod def _get_count(cls): config = cls.get_config_or_default() return config.check_count + 1", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "and # limitations under the License. 
# coding: utf-8 from", "the License. # coding: utf-8 from __future__ import absolute_import, division,", "not None: cli_config.check_count = check_count if current_version is not None:", "from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import CliConfigurationConfig class CliConfigManager(BaseConfigManager):", "by applicable law or agreed to in writing, software #", "# distributed under the License is distributed on an \"AS", "OF ANY KIND, either express or implied. # See the", "WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.", "check_count if current_version is not None: cli_config.current_version = current_version if", "current_version is not None: cli_config.current_version = current_version if server_versions is", "may obtain a copy of the License at # #", "# Unless required by applicable law or agreed to in", "ANY KIND, either express or implied. # See the License", "See the License for the specific language governing permissions and", "under the License. # coding: utf-8 from __future__ import absolute_import,", "IS_GLOBAL = True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY", "not any([check_count, current_version, server_versions, log_handler]): return cli_config = cls.get_config_or_default() if", "CliConfigurationConfig class CliConfigManager(BaseConfigManager): \"\"\"Manages access cli configuration .polyaxoncli file.\"\"\" IS_GLOBAL", "LooseVersion # pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from polyaxon.schemas.cli.cli_configuration import", "the License. # You may obtain a copy of the", "at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable", "for the specific language governing permissions and # limitations under", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "distutils.version import LooseVersion # pylint:disable=import-error from polyaxon.managers.base import BaseConfigManager from", "to in writing, software # distributed under the License is", "True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY = 3", "): if not any([check_count, current_version, server_versions, log_handler]): return cli_config =", "IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,", "Inc. # # Licensed under the Apache License, Version 2.0", "# See the License for the specific language governing permissions", "Copyright 2019 Polyaxon, Inc. # # Licensed under the Apache", "utf-8 from __future__ import absolute_import, division, print_function from distutils.version import", "config.current_version is None or config.min_version is None: return True return", "You may obtain a copy of the License at #", "@classmethod def reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ): if", "language governing permissions and # limitations under the License. 
#", "= 3 @classmethod def _get_count(cls): config = cls.get_config_or_default() return config.check_count", "may not use this file except in compliance with the", "or agreed to in writing, software # distributed under the", "not None: cli_config.server_versions = server_versions if log_handler is not None:", "> cls.FREQUENCY: return True config = cls.get_config_or_default() if config.current_version is", "required by applicable law or agreed to in writing, software", "= cls.get_config_or_default() if config.current_version is None or config.min_version is None:", "config.check_count + 1 @classmethod def reset( cls, check_count=None, current_version=None, server_versions=None,", "BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either", "WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or", "configuration .polyaxoncli file.\"\"\" IS_GLOBAL = True CONFIG_FILE_NAME = \".polyaxoncli\" CONFIG", "with the License. # You may obtain a copy of", "this file except in compliance with the License. # You", "check_count=None, current_version=None, server_versions=None, log_handler=None, ): if not any([check_count, current_version, server_versions,", "the Apache License, Version 2.0 (the \"License\"); # you may", "= \".polyaxoncli\" CONFIG = CliConfigurationConfig FREQUENCY = 3 @classmethod def", "if check_count is not None: cli_config.check_count = check_count if current_version", "limitations under the License. # coding: utf-8 from __future__ import", "= CliConfigurationConfig FREQUENCY = 3 @classmethod def _get_count(cls): config =", "current_version=None, server_versions=None, log_handler=None, ): if not any([check_count, current_version, server_versions, log_handler]):", "CliConfigManager.set_config(config=cli_config) return cli_config @classmethod def should_check(cls): count = cls._get_count() cls.reset(check_count=count)", "cls.FREQUENCY: return True config = cls.get_config_or_default() if config.current_version is None", "return cli_config = cls.get_config_or_default() if check_count is not None: cli_config.check_count", "reset( cls, check_count=None, current_version=None, server_versions=None, log_handler=None, ): if not any([check_count,", "if current_version is not None: cli_config.current_version = current_version if server_versions" ]
[ "Null() def test_string(valid_instance: Null): assert str(valid_instance) == 'NULL' def test_obj(valid_instance:", "return Null() def test_string(valid_instance: Null): assert str(valid_instance) == 'NULL' def", "str(valid_instance) == 'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null'", "== 'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null' def", "Null): assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null): assert valid_instance.json()", "valid_instance() -> Null: return Null() def test_string(valid_instance: Null): assert str(valid_instance)", "Null @pytest.fixture(scope=\"module\") def valid_instance() -> Null: return Null() def test_string(valid_instance:", "duckql.properties import Null @pytest.fixture(scope=\"module\") def valid_instance() -> Null: return Null()", "@pytest.fixture(scope=\"module\") def valid_instance() -> Null: return Null() def test_string(valid_instance: Null):", "assert str(valid_instance) == 'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj ==", "-> Null: return Null() def test_string(valid_instance: Null): assert str(valid_instance) ==", "def test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null):", "from duckql.properties import Null @pytest.fixture(scope=\"module\") def valid_instance() -> Null: return", "import pytest from duckql.properties import Null @pytest.fixture(scope=\"module\") def valid_instance() ->", "Null: return Null() def test_string(valid_instance: Null): assert str(valid_instance) == 'NULL'", "def test_string(valid_instance: Null): assert str(valid_instance) == 'NULL' def test_obj(valid_instance: Null):", "== 'properties.Null' def test_json_parse(valid_instance: Null): assert valid_instance.json() == '{\"obj\": \"properties.Null\"}'", "test_string(valid_instance: Null): assert str(valid_instance) == 'NULL' def test_obj(valid_instance: Null): assert", "'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance:", "assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null): assert valid_instance.json() ==", "import Null @pytest.fixture(scope=\"module\") def valid_instance() -> Null: return Null() def", "valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null): assert valid_instance.json() == '{\"obj\":", "def valid_instance() -> Null: return Null() def test_string(valid_instance: Null): assert", "Null): assert str(valid_instance) == 'NULL' def test_obj(valid_instance: Null): assert valid_instance.obj", "test_obj(valid_instance: Null): assert valid_instance.obj == 'properties.Null' def test_json_parse(valid_instance: Null): assert", "pytest from duckql.properties import Null @pytest.fixture(scope=\"module\") def valid_instance() -> Null:" ]
[ "# Boston, MA 02111-1307, USA. # import os from contextlib", "Suite 330, # Boston, MA 02111-1307, USA. # import os", "i, param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s' %", "name = name.replace(' ', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name)", "header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\"", "header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, )) for", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU", "in func.parameters: self.out_c.write(\" * @%s\" % (param.argname, )) if param.direction", "self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for node in self.namespace.values(): if", "os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, ))", "if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' * Returns: ')", "self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def", "to the # Free Software Foundation, Inc., 59 Temple Place", "WITHOUT ANY WARRANTY; without even the implied warranty of #", "if (isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix =", "for header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir =", "return param.type.ctype + suffix def _write_prelude(self, out, func): if self.function_decoration:", "Free Software Foundation, Inc., 59 Temple Place - Suite 330,", "ast.Function) self._function_bodies[node] = body def codegen(self): self.out_h = open(self.out_h_filename, 'w')", "out.write(\"\"\" %s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters)", "self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname)) if i < l", "= self._function_bodies.get(node) if not body: body = '' self.out_c.write(body) self._codegen_end()", "\") else: out.write('void') out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\")", "self.include_first_src = include_first_src self.include_last_src = include_last_src self._function_bodies = {} self.namespace", "EDIT */\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for header in self.include_first_header:", "for introspecting GObject libraries # Copyright (C) 2010 Red Hat,", "def _typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT,", "*\\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def", "warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\")", "Temple Place - Suite 330, # Boston, MA 02111-1307, USA.", "node): if (node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer", "len(func.parameters) if func.parameters: for i, param in enumerate(func.parameters): ctype =", "% \" 
\".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol))", "% (param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates:", "a copy of the GNU Lesser General Public # License", "%s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if func.parameters:", ". import ast class CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename,", "ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING,", "(self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and param.direction", "= '/* GENERATED BY testcodegen.py; DO NOT EDIT */\\n\\n' self.out_h.write(warning)", "-*- Mode: Python -*- # GObject-Introspection - a framework for", "you can redistribute it and/or # modify it under the", "def _write_prelude(self, out, func): if self.function_decoration: out.write(\"\"\" %s\"\"\" % \"", "* Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def _function(self, func):", "along with this library; if not, write to the #", "Mode: Python -*- # GObject-Introspection - a framework for introspecting", "header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self, node, body):", "include_last_src self._function_bodies = {} self.namespace = namespace def gen_symbol(self, name):", "import contextmanager from . import ast class CCodeGenerator(object): def __init__(self,", "(ctype, param.argname)) if i < l - 1: out.write(\", \")", "%s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval),", "Boston, MA 02111-1307, USA. # import os from contextlib import", "\"const gchar*\" + suffix return param.type.ctype + suffix def _write_prelude(self,", "self.out_c.write('\\n */') @contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func)", "function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename =", "os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, )) for header in", "param.transfer == ast.PARAM_TRANSFER_NONE): return \"const gchar*\" + suffix return param.type.ctype", "self.out_c.write(' * Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write('", "59 Temple Place - Suite 330, # Boston, MA 02111-1307,", "self.include_last_src = include_last_src self._function_bodies = {} self.namespace = namespace def", "Foundation, Inc., 59 Temple Place - Suite 330, # Boston,", "self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename,", "General Public License for more details. 
# # You should", "either # version 2 of the License, or (at your", "(node.transfer, )) def _write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol,", "def set_function_body(self, node, body): assert isinstance(node, ast.Function) self._function_bodies[node] = body", "') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func)", "func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self):", "or FITNESS FOR A PARTICULAR PURPOSE. See the GNU #", "Inc. # # This library is free software; you can", "even the implied warranty of # MERCHANTABILITY or FITNESS FOR", "self.include_first_header = include_first_header self.include_last_header = include_last_header self.include_first_src = include_first_src self.include_last_src", "the implied warranty of # MERCHANTABILITY or FITNESS FOR A", "Copyright (C) 2010 Red Hat, Inc. # # This library", "self.include_last_header = include_last_header self.include_first_src = include_first_src self.include_last_src = include_last_src self._function_bodies", "the # Free Software Foundation, Inc., 59 Temple Place -", "src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" %", "import ast class CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[],", "return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if (isinstance(param,", "param.caller_allocates: allocate_string = ' caller-allocates' else: allocate_string = '' self.out_c.write(\":", "Software Foundation, Inc., 59 Temple Place - Suite 330, #", "# License as published by the Free Software Foundation; either", "header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef __%s_H__", "= open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for node", "Lesser General Public # License as published by the Free", "GObject libraries # Copyright (C) 2010 Red Hat, Inc. 
#", "(nsupper, nsupper)) for header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header)", "have received a copy of the GNU Lesser General Public", "(node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" %", "def gen_symbol(self, name): name = name.replace(' ', '_') return '%s_%s'", "= function_decoration self.include_first_header = include_first_header self.include_last_header = include_last_header self.include_first_src =", "(ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix = '' if", "self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning", "self._function_bodies[node] = body def codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c", ")) for header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def", "self._codegen_start() for node in self.namespace.values(): if isinstance(node, ast.Function): with self._function(node):", "* Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' *", "if isinstance(node, ast.Function): with self._function(node): body = self._function_bodies.get(node) if not", "*/') @contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\")", "License for more details. # # You should have received", "MA 02111-1307, USA. # import os from contextlib import contextmanager", "namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename =", "return \"const gchar*\" + suffix return param.type.ctype + suffix def", "= self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname)) if i <", "self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n", "func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval)", "% (func.symbol, )) for param in func.parameters: self.out_c.write(\" * @%s\"", "Place - Suite 330, # Boston, MA 02111-1307, USA. #", "self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__ #include <glib-object.h> \"\"\" % (nsupper,", "self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if (node.type not in", "= '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return", "open(self.out_c_filename, 'w') self._codegen_start() for node in self.namespace.values(): if isinstance(node, ast.Function):", "testcodegen.py; DO NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for", "modify it under the terms of the GNU Lesser General", "'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for node in self.namespace.values():", "else: out.write('void') out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def", "out.write(\", \") else: out.write('void') out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h, func)", "details. 
# # You should have received a copy of", "if param.caller_allocates: allocate_string = ' caller-allocates' else: allocate_string = ''", "library is free software; you can redistribute it and/or #", "(param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return \"const gchar*\" +", "Foundation; either # version 2 of the License, or (at", "1: out.write(\", \") else: out.write('void') out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h,", "# Free Software Foundation, Inc., 59 Temple Place - Suite", "USA. # import os from contextlib import contextmanager from .", "ast class CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[],", "include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename self.function_decoration =", "% (header, )) for header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" %", "by the Free Software Foundation; either # version 2 of", "from contextlib import contextmanager from . import ast class CCodeGenerator(object):", "node in self.namespace.values(): if isinstance(node, ast.Function): with self._function(node): body =", "if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = '", "libraries # Copyright (C) 2010 Red Hat, Inc. # #", "self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if (node.type not in ast.BASIC_TYPES or", "self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c,", "self.out_c.write('\\n *\\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager", "% (nsupper, nsupper)) for header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" %", "out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename", "name.replace(' ', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self,", "suffix = '*' else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME))", "not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" % (node.transfer,", "ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else:", "NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for header in", "FOR A PARTICULAR PURPOSE. See the GNU # Lesser General", "== ast.PARAM_TRANSFER_NONE): return \"const gchar*\" + suffix return param.type.ctype +", "class CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[],", "# modify it under the terms of the GNU Lesser", "self._function(node): body = self._function_bodies.get(node) if not body: body = ''", "if i < l - 1: out.write(\", \") else: out.write('void')", "import os from contextlib import contextmanager from . 
import ast", "isinstance(node, ast.Function) self._function_bodies[node] = body def codegen(self): self.out_h = open(self.out_h_filename,", "Free Software Foundation; either # version 2 of the License,", "from . import ast class CCodeGenerator(object): def __init__(self, namespace, out_h_filename,", "License along with this library; if not, write to the", "def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]):", "Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' * Returns:", "_write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if (node.type", "@%s\" % (param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if", "'/* GENERATED BY testcodegen.py; DO NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper", "if self.function_decoration: out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\" %s %s", "it and/or # modify it under the terms of the", ")) for param in func.parameters: self.out_c.write(\" * @%s\" % (param.argname,", "contextlib import contextmanager from . import ast class CCodeGenerator(object): def", "include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename self.function_decoration = function_decoration", "BY testcodegen.py; DO NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper()", "that it will be useful, # but WITHOUT ANY WARRANTY;", "function_decoration self.include_first_header = include_first_header self.include_last_header = include_last_header self.include_first_src = include_first_src", "gen_symbol(self, name): name = name.replace(' ', '_') return '%s_%s' %", "self.function_decoration = function_decoration self.include_first_header = include_first_header self.include_last_header = include_last_header self.include_first_src", "out.write('void') out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self,", "param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = ' caller-allocates'", "name): name = name.replace(' ', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0],", "introspecting GObject libraries # Copyright (C) 2010 Red Hat, Inc.", "This library is distributed in the hope that it will", "(%s%s) \" % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write('", "'*' else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer", "more details. 
# # You should have received a copy", "*\\n') self.out_c.write(' * Undocumented.') if func.retval.type != ast.TYPE_NONE: self.out_c.write('\\n *\\n')", "self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__", "src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, )) for header in self.include_last_src:", "\"%s\"\\n\"\"\" % header) self.out_c.write(warning) for header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\"", "be useful, # but WITHOUT ANY WARRANTY; without even the", "write to the # Free Software Foundation, Inc., 59 Temple", "and/or # modify it under the terms of the GNU", "the GNU Lesser General Public # License as published by", "(self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if func.parameters: for i, param", "* @%s\" % (param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT):", "Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def _function(self, func): self._write_prototype(func)", "#ifndef __%s_H__ #define __%s_H__ #include <glib-object.h> \"\"\" % (nsupper, nsupper))", "assert isinstance(node, ast.Function) self._function_bodies[node] = body def codegen(self): self.out_h =", "param.type.ctype + suffix def _write_prelude(self, out, func): if self.function_decoration: out.write(\"\"\"", "with this library; if not, write to the # Free", "body): assert isinstance(node, ast.Function) self._function_bodies[node] = body def codegen(self): self.out_h", "_codegen_start(self): warning = '/* GENERATED BY testcodegen.py; DO NOT EDIT", "\" \".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l", "= os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, )) for header", "= include_last_header self.include_first_src = include_first_src self.include_last_src = include_last_src self._function_bodies =", "self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning = '/* GENERATED BY testcodegen.py; DO", "PARTICULAR PURPOSE. See the GNU # Lesser General Public License", "self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close()", "should have received a copy of the GNU Lesser General", "(func.symbol, )) for param in func.parameters: self.out_c.write(\" * @%s\" %", "self.out_c.write(\": (%s%s) \" % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n')", "self.out_h.close() self.out_c.close() def set_function_body(self, node, body): assert isinstance(node, ast.Function) self._function_bodies[node]", "allocate_string = ' caller-allocates' else: allocate_string = '' self.out_c.write(\": (%s%s)", "software; you can redistribute it and/or # modify it under", "out_c_filename self.function_decoration = function_decoration self.include_first_header = include_first_header self.include_last_header = include_last_header", "l = len(func.parameters) if func.parameters: for i, param in enumerate(func.parameters):", "of the GNU Lesser General Public # License as published", "later version. 
# # This library is distributed in the", "= self.namespace.name.upper() for header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header)", "include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename", "or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" % (node.transfer, )) def _write_docs(self,", "PURPOSE. See the GNU # Lesser General Public License for", "param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix =", "# # This library is free software; you can redistribute", "self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning) for header in self.include_first_src: self.out_c.write(\"\"\"#include", "__%s_H__ #include <glib-object.h> \"\"\" % (nsupper, nsupper)) for header in", "% header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self, node,", "self.out_c.write(warning) for header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir", "\"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__ #include <glib-object.h>", "framework for introspecting GObject libraries # Copyright (C) 2010 Red", "(header, )) for header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header)", "or (at your option) any later version. # # This", "for param in func.parameters: self.out_c.write(\" * @%s\" % (param.argname, ))", "param in func.parameters: self.out_c.write(\" * @%s\" % (param.argname, )) if", "<gh_stars>0 # -*- Mode: Python -*- # GObject-Introspection - a", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "'w') self._codegen_start() for node in self.namespace.values(): if isinstance(node, ast.Function): with", "and param.transfer == ast.PARAM_TRANSFER_NONE): return \"const gchar*\" + suffix return", "implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "out.write(\")\") def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node):", "in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname))", "l - 1: out.write(\", \") else: out.write('void') out.write(\")\") def _write_prototype(self,", "of the GNU Lesser General Public # License along with", "in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header", "self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__ #include", "(param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.') if", "for header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning) for", "it will be useful, # but WITHOUT ANY WARRANTY; without", "out, func): if self.function_decoration: out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\"", "def _codegen_start(self): warning = '/* GENERATED BY testcodegen.py; DO NOT", "include_last_header self.include_first_src = include_first_src self.include_last_src = include_last_src self._function_bodies = {}", "redistribute it 
and/or # modify it under the terms of", "ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return \"const gchar*\" + suffix", "= open(self.out_c_filename, 'w') self._codegen_start() for node in self.namespace.values(): if isinstance(node,", "if func.parameters: for i, param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param)", "header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\")", "hope that it will be useful, # but WITHOUT ANY", "def _write_annotation_transfer(self, node): if (node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')):", "'' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return \"const", "library is distributed in the hope that it will be", "< l - 1: out.write(\", \") else: out.write('void') out.write(\")\") def", "out.write('%s %s' % (ctype, param.argname)) if i < l -", "for header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def _codegen_end(self):", "the License, or (at your option) any later version. #", "func): if self.function_decoration: out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\" %s", "= include_last_src self._function_bodies = {} self.namespace = namespace def gen_symbol(self,", "i < l - 1: out.write(\", \") else: out.write('void') out.write(\")\")", "= os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header,", "(transfer %s)\" % (node.transfer, )) def _write_docs(self, func): self.out_c.write(\"/**\\n *", "will be useful, # but WITHOUT ANY WARRANTY; without even", "# -*- Mode: Python -*- # GObject-Introspection - a framework", "ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */')", "self.namespace = namespace def gen_symbol(self, name): name = name.replace(' ',", "open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for node in", "%s:\\n\" % (func.symbol, )) for param in func.parameters: self.out_c.write(\" *", "with self._function(node): body = self._function_bodies.get(node) if not body: body =", "in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix = ''", "(param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string", "param): if (isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix", "This library is free software; you can redistribute it and/or", "body def codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename,", "# This library is distributed in the hope that it", "contextmanager from . import ast class CCodeGenerator(object): def __init__(self, namespace,", "for i, param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s'", "set_function_body(self, node, body): assert isinstance(node, ast.Function) self._function_bodies[node] = body def", "(isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*'", "for more details. 
# # You should have received a", "name) def _typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and param.direction in", "self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.') if func.retval.type != ast.TYPE_NONE:", "suffix return param.type.ctype + suffix def _write_prelude(self, out, func): if", "in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = ' caller-allocates' else:", "header) self.out_c.write(warning) for header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header)", "'' self.out_c.write(\": (%s%s) \" % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write('", "ast.PARAM_TRANSFER_NONE): return \"const gchar*\" + suffix return param.type.ctype + suffix", "free software; you can redistribute it and/or # modify it", "Red Hat, Inc. # # This library is free software;", "and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)): suffix = '*' else: suffix", "func.symbol)) l = len(func.parameters) if func.parameters: for i, param in", "is free software; you can redistribute it and/or # modify", "2010 Red Hat, Inc. # # This library is free", "func): self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol, )) for param in", "suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE):", "_write_annotation_transfer(self, node): if (node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\"", "else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer ==", "ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY", "\"\"\" % (nsupper, nsupper)) for header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\"", "\".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l =", "% (ctype, param.argname)) if i < l - 1: out.write(\",", "self.out_h = open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start() for", "A PARTICULAR PURPOSE. See the GNU # Lesser General Public", "received a copy of the GNU Lesser General Public #", "option) any later version. # # This library is distributed", "%s %s (\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if", "published by the Free Software Foundation; either # version 2", "'_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if", "License, or (at your option) any later version. 
# #", "under the terms of the GNU Lesser General Public #", "the hope that it will be useful, # but WITHOUT", "suffix def _write_prelude(self, out, func): if self.function_decoration: out.write(\"\"\" %s\"\"\" %", "{} self.namespace = namespace def gen_symbol(self, name): name = name.replace('", "% header) self.out_c.write(warning) for header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" %", "def _write_prototype(self, func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if", "' caller-allocates' else: allocate_string = '' self.out_c.write(\": (%s%s) \" %", "isinstance(node, ast.Function): with self._function(node): body = self._function_bodies.get(node) if not body:", "GNU # Lesser General Public License for more details. #", "library; if not, write to the # Free Software Foundation,", "out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\" %", "GENERATED BY testcodegen.py; DO NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper =", "Inc., 59 Temple Place - Suite 330, # Boston, MA", "not, write to the # Free Software Foundation, Inc., 59", "- Suite 330, # Boston, MA 02111-1307, USA. # import", "= include_first_header self.include_last_header = include_last_header self.include_first_src = include_first_src self.include_last_src =", "(\"\"\" % (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if func.parameters: for", "self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename self.function_decoration = function_decoration self.include_first_header", "ast.Function): with self._function(node): body = self._function_bodies.get(node) if not body: body", "self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self, node, body): assert isinstance(node, ast.Function)", "You should have received a copy of the GNU Lesser", "header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning) for header", "# Lesser General Public License for more details. # #", "Hat, Inc. 
# # This library is free software; you", "for node in self.namespace.values(): if isinstance(node, ast.Function): with self._function(node): body", "Software Foundation; either # version 2 of the License, or", "in self.namespace.values(): if isinstance(node, ast.Function): with self._function(node): body = self._function_bodies.get(node)", "node, body): assert isinstance(node, ast.Function) self._function_bodies[node] = body def codegen(self):", "= out_c_filename self.function_decoration = function_decoration self.include_first_header = include_first_header self.include_last_header =", "nsupper)) for header in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning)", "distributed in the hope that it will be useful, #", "allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.') if func.retval.type", "= '' self.out_c.write(\": (%s%s) \" % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\")", "of the License, or (at your option) any later version.", "= out_h_filename self.out_c_filename = out_c_filename self.function_decoration = function_decoration self.include_first_header =", "param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype,", "# GObject-Introspection - a framework for introspecting GObject libraries #", "in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close()", "% (self._typecontainer_to_ctype(func.retval), func.symbol)) l = len(func.parameters) if func.parameters: for i,", "in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define", "#define __%s_H__ #include <glib-object.h> \"\"\" % (nsupper, nsupper)) for header", "-*- # GObject-Introspection - a framework for introspecting GObject libraries", "\"%s\"\\n\"\"\" % header) def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self,", ")) if param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string =", "_typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and param.direction in (ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT)):", "func): self._write_prelude(self.out_h, func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if (node.type not", "% header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__ #include <glib-object.h> \"\"\"", "body = self._function_bodies.get(node) if not body: body = '' self.out_c.write(body)", "# Copyright (C) 2010 Red Hat, Inc. 
# # This", "useful, # but WITHOUT ANY WARRANTY; without even the implied", "= include_first_src self.include_last_src = include_last_src self._function_bodies = {} self.namespace =", "% header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir) self.out_c.write(\"\"\"#include", "self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header =", "terms of the GNU Lesser General Public # License as", "self.out_c.close() def set_function_body(self, node, body): assert isinstance(node, ast.Function) self._function_bodies[node] =", "self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning) for header in self.include_first_src:", "# You should have received a copy of the GNU", "(ast.PARAM_DIRECTION_OUT, ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = ' caller-allocates' else: allocate_string", "_write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol, )) for param", "+ suffix return param.type.ctype + suffix def _write_prelude(self, out, func):", "in self.include_last_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_c.write(warning) for header in", "Lesser General Public License for more details. # # You", "# import os from contextlib import contextmanager from . import", "func.parameters: for i, param in enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s", "% (node.transfer, )) def _write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\" %", "enumerate(func.parameters): ctype = self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname)) if", "self._function_bodies = {} self.namespace = namespace def gen_symbol(self, name): name", "See the GNU # Lesser General Public License for more", "include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename self.out_c_filename = out_c_filename self.function_decoration", "_write_prelude(self, out, func): if self.function_decoration: out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration))", "2 of the License, or (at your option) any later", "ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" % (node.transfer, )) def", "#include <glib-object.h> \"\"\" % (nsupper, nsupper)) for header in self.include_last_header:", "= {} self.namespace = namespace def gen_symbol(self, name): name =", "Python -*- # GObject-Introspection - a framework for introspecting GObject", "if (node.type not in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\"", "= ' caller-allocates' else: allocate_string = '' self.out_c.write(\": (%s%s) \"", "self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.') if func.retval.type !=", "\" % (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' *", "self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol, )) for param in func.parameters:", "for header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\" #ifndef", "out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename = out_h_filename 
self.out_c_filename", "func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning = '/* GENERATED", "# # This library is distributed in the hope that", "= len(func.parameters) if func.parameters: for i, param in enumerate(func.parameters): ctype", "= body def codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c =", "02111-1307, USA. # import os from contextlib import contextmanager from", "of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "as published by the Free Software Foundation; either # version", "node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" % (node.transfer, )) def _write_docs(self, func):", "warning = '/* GENERATED BY testcodegen.py; DO NOT EDIT */\\n\\n'", "func.parameters: self.out_c.write(\" * @%s\" % (param.argname, )) if param.direction in", "it under the terms of the GNU Lesser General Public", "include_first_header self.include_last_header = include_last_header self.include_first_src = include_first_src self.include_last_src = include_last_src", "GNU Lesser General Public # License as published by the", "caller-allocates' else: allocate_string = '' self.out_c.write(\": (%s%s) \" % (param.direction,", "header) self.out_h.write(\"\"\" #ifndef __%s_H__ #define __%s_H__ #include <glib-object.h> \"\"\" %", "version. # # This library is distributed in the hope", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser", "copy of the GNU Lesser General Public # License along", "Public License for more details. # # You should have", "% (param.direction, allocate_string)) self._write_annotation_transfer(param) self.out_c.write(\":\\n\") self.out_c.write(' *\\n') self.out_c.write(' * Undocumented.')", "330, # Boston, MA 02111-1307, USA. # import os from", "out_h_filename self.out_c_filename = out_c_filename self.function_decoration = function_decoration self.include_first_header = include_first_header", "param.argname)) if i < l - 1: out.write(\", \") else:", "DO NOT EDIT */\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for header", "# version 2 of the License, or (at your option)", "_function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def", "Public # License along with this library; if not, write", "nsupper = self.namespace.name.upper() for header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" %", "any later version. 
# # This library is distributed in", "def _codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self, node, body): assert", "self.namespace.name.upper() for header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\" % header) self.out_h.write(\"\"\"", "self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning = '/*", "version 2 of the License, or (at your option) any", "this library; if not, write to the # Free Software", "ast.PARAM_DIRECTION_INOUT): if param.caller_allocates: allocate_string = ' caller-allocates' else: allocate_string =", "', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param):", "# This library is free software; you can redistribute it", "self.function_decoration: out.write(\"\"\" %s\"\"\" % \" \".join(self.function_decoration)) out.write(\"\"\" %s %s (\"\"\"", "General Public # License along with this library; if not,", "% (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if (isinstance(param, ast.Parameter) and", "- 1: out.write(\", \") else: out.write('void') out.write(\")\") def _write_prototype(self, func):", "self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning =", "can redistribute it and/or # modify it under the terms", "*/\\n\\n' self.out_h.write(warning) nsupper = self.namespace.name.upper() for header in self.include_first_header: self.out_h.write(\"\"\"#include", "self.out_c_filename = out_c_filename self.function_decoration = function_decoration self.include_first_header = include_first_header self.include_last_header", "include_first_src self.include_last_src = include_last_src self._function_bodies = {} self.namespace = namespace", "CCodeGenerator(object): def __init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[],", "!= ast.TYPE_NONE: self.out_c.write('\\n *\\n') self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n", "self.out_h.write(warning) nsupper = self.namespace.name.upper() for header in self.include_first_header: self.out_h.write(\"\"\"#include \"%s\"\\n\"\"\"", "else: allocate_string = '' self.out_c.write(\": (%s%s) \" % (param.direction, allocate_string))", "= name.replace(' ', '_') return '%s_%s' % (self.namespace.symbol_prefixes[0], name) def", "<glib-object.h> \"\"\" % (nsupper, nsupper)) for header in self.include_last_header: self.out_h.write(\"\"\"#include", "%s)\" % (node.transfer, )) def _write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\"", "@contextmanager def _function(self, func): self._write_prototype(func) self._write_docs(func) self._write_prelude(self.out_c, func) self.out_c.write(\"\\n{\\n\") yield", "- a framework for introspecting GObject libraries # Copyright (C)", "self.out_c.write(\"\\n{\\n\") yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning = '/* GENERATED BY", "Public # License as published by the Free Software Foundation;", "%s' % (ctype, param.argname)) if i < l - 1:", "def _write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol, )) for", "'%s_%s' % (self.namespace.symbol_prefixes[0], name) def _typecontainer_to_ctype(self, param): if (isinstance(param, 
ast.Parameter)", "_codegen_end(self): self.out_h.write(\"\"\"#endif\\n\"\"\") self.out_h.close() self.out_c.close() def set_function_body(self, node, body): assert isinstance(node,", "\"%s\"\\n\\n\"\"\" % (header, )) for header in self.include_last_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\"", "= '*' else: suffix = '' if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and", "License as published by the Free Software Foundation; either #", "in the hope that it will be useful, # but", "self.out_c.write(\"\"\"#include \"%s\"\\n\\n\"\"\" % (header, )) for header in self.include_last_src: self.out_c.write(\"\"\"#include", "your option) any later version. # # This library is", "= namespace def gen_symbol(self, name): name = name.replace(' ', '_')", "func) self.out_h.write(\";\\n\\n\") def _write_annotation_transfer(self, node): if (node.type not in ast.BASIC_TYPES", "namespace def gen_symbol(self, name): name = name.replace(' ', '_') return", "General Public # License as published by the Free Software", "the Free Software Foundation; either # version 2 of the", "allocate_string = '' self.out_c.write(\": (%s%s) \" % (param.direction, allocate_string)) self._write_annotation_transfer(param)", "GObject-Introspection - a framework for introspecting GObject libraries # Copyright", "self.out_c.write(\" (transfer %s)\" % (node.transfer, )) def _write_docs(self, func): self.out_c.write(\"/**\\n", "os from contextlib import contextmanager from . import ast class", ")) def _write_docs(self, func): self.out_c.write(\"/**\\n * %s:\\n\" % (func.symbol, ))", "self.out_c.write(\" * @%s\" % (param.argname, )) if param.direction in (ast.PARAM_DIRECTION_OUT,", "GNU Lesser General Public # License along with this library;", "__%s_H__ #define __%s_H__ #include <glib-object.h> \"\"\" % (nsupper, nsupper)) for", "self.out_c.write(' * Returns: ') self._write_annotation_transfer(func.retval) self.out_c.write('\\n */') @contextmanager def _function(self,", "header in self.include_first_src: self.out_c.write(\"\"\"#include \"%s\"\\n\"\"\" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name))", "# License along with this library; if not, write to", "ctype = self._typecontainer_to_ctype(param) out.write('%s %s' % (ctype, param.argname)) if i", "the GNU Lesser General Public # License along with this", "* %s:\\n\" % (func.symbol, )) for param in func.parameters: self.out_c.write(\"", "(at your option) any later version. # # This library", "a framework for introspecting GObject libraries # Copyright (C) 2010", "+ suffix def _write_prelude(self, out, func): if self.function_decoration: out.write(\"\"\" %s\"\"\"", "gchar*\" + suffix return param.type.ctype + suffix def _write_prelude(self, out,", "if (param.type.is_equiv((ast.TYPE_STRING, ast.TYPE_FILENAME)) and param.transfer == ast.PARAM_TRANSFER_NONE): return \"const gchar*\"", "WARRANTY; without even the implied warranty of # MERCHANTABILITY or", "self.namespace.values(): if isinstance(node, ast.Function): with self._function(node): body = self._function_bodies.get(node) if", "\"%s\"\\n\"\"\" % header) src_dir = os.path.dirname(os.path.realpath(self.out_c.name)) header = os.path.relpath(self.out_h_filename, src_dir)", "__init__(self, namespace, out_h_filename, out_c_filename, function_decoration=[], include_first_header=[], include_last_header=[], include_first_src=[], include_last_src=[]): self.out_h_filename", "the terms of the GNU Lesser General Public # License", "(C) 2010 Red Hat, Inc. 
# # This library is", "Lesser General Public # License along with this library; if", "yield self.out_c.write(\"}\\n\\n\") def _codegen_start(self): warning = '/* GENERATED BY testcodegen.py;", "is distributed in the hope that it will be useful,", "codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w') self._codegen_start()", "if not, write to the # Free Software Foundation, Inc.,", "# # You should have received a copy of the", "the GNU # Lesser General Public License for more details.", "def codegen(self): self.out_h = open(self.out_h_filename, 'w') self.out_c = open(self.out_c_filename, 'w')", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "without even the implied warranty of # MERCHANTABILITY or FITNESS", "in ast.BASIC_TYPES or node.type.ctype.endswith('*')): self.out_c.write(\" (transfer %s)\" % (node.transfer, ))" ]
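# --- Illustrative usage sketch (not part of the original module) -----------
# gen_symbol() only relies on the namespace exposing `symbol_prefixes`, so it
# can be demonstrated with a minimal stand-in; `_StubNamespace` below is a
# hypothetical helper introduced purely for illustration.
#
#     class _StubNamespace:
#         symbol_prefixes = ['everything']
#
#     gen = CCodeGenerator(_StubNamespace(), 'everything.h', 'everything.c')
#     gen.gen_symbol('one method')  # -> 'everything_one_method'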
[ "environment variable in order for subsequent calls to use this", "Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.", "True, ) -> None: self.deterministic = deterministic self.deterministic_function = deterministic_function", "to stdout \"\"\" def __init__(self, command: tp.List[str], verbose: bool =", "under the MIT license found in the # LICENSE file", "str Everything that has been sent to stdout \"\"\" def", "of descriptors for the parametrization This can be used within", "dir = os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname = Path(self.name) /", "None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable in order for subsequent", "bool = True, deterministic_function: bool = True, monoobjective: bool =", "it goes through the pipeline and notify when it is", "LICENSE file in the root directory of this source tree.", "\"\"\" def __init__(self, command: tp.List[str], verbose: bool = False, cwd:", "sent: {full_command}\") outlines: tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,", "other: \"Descriptors\") -> \"Descriptors\": values = {field: getattr(self, field) &", "command must run from Returns ------- str Everything that has", "CLEAN_COPY_DIRECTORY environment variable \"\"\" key = \"CLEAN_COPY_DIRECTORY\" @classmethod def set_clean_copy_environment_variable(cls,", "y) for x, y in kwargs.items()] if self.verbose: print(f\"The following", "if the job fails, or sent as output of the", "self.copyname class FailedJobError(RuntimeError): \"\"\"Job failed during processing \"\"\" class CommandFunction:", "None else str(cwd) self.env = env def __call__(self, *args: tp.Any,", "will be printed if the job fails, or sent as", "process.kill() process.wait() raise FailedJobError(\"Job got killed for an unknown reason.\")", "the following command more robust (probably fails in multiple cases)", "is finished. The output is a string containing everything that", "reason.\") stderr = process.communicate()[1] # we already got stdout stdout", "following command is sent: {full_command}\") outlines: tp.List[str] = [] with", "True, continuous: bool = True, metrizable: bool = True, ordered:", "temporary directory will be created can be controlled through the", "the contextmanager returns the clean copy path - the directory", "They will be printed if the job fails, or sent", "shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path: super().__enter__() return self.copyname class", "string containing everything that has been sent to stdout Parameters", "the root directory of this source tree. 
import os import", "to make sure it goes through the pipeline and notify", "the MIT license found in the # LICENSE file in", "created can be controlled through the CLEAN_COPY_DIRECTORY environment variable \"\"\"", "a list verbose: bool prints the command and stdout at", "def __and__(self, other: \"Descriptors\") -> \"Descriptors\": values = {field: getattr(self,", "os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname = Path(self.name) / Path(source).name shutil.copytree(str(source),", "list\") self.command = command self.verbose = verbose self.cwd = None", "robust (probably fails in multiple cases) full_command = self.command +", "str(cwd) self.env = env def __call__(self, *args: tp.Any, **kwargs: tp.Any)", "__init__( self, deterministic: bool = True, deterministic_function: bool = True,", "\"\"\"Wraps a command as a function in order to make", "sure it goes through the pipeline and notify when it", "license found in the # LICENSE file in the root", "to run, as a list verbose: bool prints the command", "sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\" class NotSupportedError(RuntimeError): \"\"\"This type of operation", "this directory as base for the copies. \"\"\" assert Path(directory).exists(),", "for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\" class NotSupportedError(RuntimeError):", "+ [str(x) for x in args] + [\"--{}={}\".format(x, y) for", "or sent as output of the function Errors are provided", "shell=False, cwd=self.cwd, env=self.env) as process: try: assert process.stdout is not", "stdout = \"\\n\".join(outlines) retcode = process.poll() if stderr and (retcode", "CommandFunction: \"\"\"Wraps a command as a function in order to", "def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str: \"\"\"Call the", "an unknown reason.\") stderr = process.communicate()[1] # we already got", "source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None) -> None:", "the CLEAN_COPY_DIRECTORY environment variable in order for subsequent calls to", "TypeError(\"The command must be provided as a list\") self.command =", "clean copy path - the directory where the temporary directory", "addidional arguments The keyword arguments will be sent as --{key}={val}", "Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path: super().__enter__() return self.copyname", "be created can be controlled through the CLEAN_COPY_DIRECTORY environment variable", "but: - the created copy path is available through the", "return f\"{self.__class__.__name__}({diff})\" class NotSupportedError(RuntimeError): \"\"\"This type of operation is not", "at runtime cwd: Path/str path to the location where the", "for field in self.__dict__} return Descriptors(**values) def __repr__(self) -> str:", "= True, not_manyobjective: bool = True, continuous: bool = True,", "= verbose self.cwd = None if cwd is None else", "= subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) raise FailedJobError(stderr.decode()) from subprocess_error return", "subsequent calls to use this directory as base for the", "list verbose: bool prints the command and stdout at runtime", "= deterministic self.deterministic_function = deterministic_function self.continuous = continuous self.metrizable =", "sent to stdout Parameters ---------- command: list 
command to run,", "self.copyname = Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) ->", "when it is finished. The output is a string containing", "tp.Optional[tp.Dict[str, str]] = None) -> None: if not isinstance(command, list):", "env=self.env) as process: try: assert process.stdout is not None for", "# TODO add repr # pylint: disable=too-many-arguments def __init__( self,", "path - the directory where the temporary directory will be", "following command more robust (probably fails in multiple cases) full_command", "subprocess import typing as tp from pathlib import Path from", "where the temporary directory will be created can be controlled", "stderr \"\"\" # TODO make the following command more robust", "the directory where the temporary directory will be created can", "import tempfile import subprocess import typing as tp from pathlib", "we already got stdout stdout = \"\\n\".join(outlines) retcode = process.poll()", "raise FailedJobError(\"Job got killed for an unknown reason.\") stderr =", "cwd: tp.Optional[tp.Union[str, Path]] = None, env: tp.Optional[tp.Dict[str, str]] = None)", "is not supported by the parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): #", "__call__(self, *args: tp.Any, **kwargs: tp.Any) -> str: \"\"\"Call the cammand", "except Exception: # pylint: disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job got", "True, ordered: bool = True, ) -> None: self.deterministic =", "be controlled through the CLEAN_COPY_DIRECTORY environment variable \"\"\" key =", "must run from Returns ------- str Everything that has been", "print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr)", "has been sent to stdout Parameters ---------- command: list command", "command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]] =", "internal stderr \"\"\" # TODO make the following command more", "= os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname = Path(self.name) / Path(source).name", "through the copyname attribute - the contextmanager returns the clean", "def __init__(self, command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str,", "None: if not isinstance(command, list): raise TypeError(\"The command must be", "use this directory as base for the copies. \"\"\" assert", "-> None: self.deterministic = deterministic self.deterministic_function = deterministic_function self.continuous =", ") -> None: self.deterministic = deterministic self.deterministic_function = deterministic_function self.continuous", "Returns ------- str Everything that has been sent to stdout", "tp from pathlib import Path from nevergrad.common import tools as", "tp.Optional[tp.Union[Path, str]] = None) -> None: if dir is None:", "if self.verbose: print(f\"The following command is sent: {full_command}\") outlines: tp.List[str]", "shutil import tempfile import subprocess import typing as tp from", "the parametrization This can be used within optimizers. 
\"\"\" #", "in kwargs.items()] if self.verbose: print(f\"The following command is sent: {full_command}\")", "in self.__dict__} return Descriptors(**values) def __repr__(self) -> str: diff =", "command must be provided as a list\") self.command = command", "is not None for line in iter(process.stdout.readline, b''): if not", "True, deterministic_function: bool = True, monoobjective: bool = True, not_manyobjective:", "return Descriptors(**values) def __repr__(self) -> str: diff = \",\".join(f\"{x}={y}\" for", "getattr(other, field) for field in self.__dict__} return Descriptors(**values) def __repr__(self)", "stdout at runtime cwd: Path/str path to the location where", "subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process: try: assert", "True, metrizable: bool = True, ordered: bool = True, )", "finished. The output is a string containing everything that has", "disable=too-many-arguments def __init__( self, deterministic: bool = True, deterministic_function: bool", "continuous self.metrizable = metrizable self.ordered = ordered self.monoobjective = monoobjective", "TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore \"\"\"Creates a full copy of a", "to use this directory as base for the copies. \"\"\"", "= True, ordered: bool = True, ) -> None: self.deterministic", "command and stdout at runtime cwd: Path/str path to the", "class FailedJobError(RuntimeError): \"\"\"Job failed during processing \"\"\" class CommandFunction: \"\"\"Wraps", "assert Path(directory).exists(), \"Directory does not exist\" os.environ[cls.key] = str(directory) #", "not_manyobjective: bool = True, continuous: bool = True, metrizable: bool", "+ [\"--{}={}\".format(x, y) for x, y in kwargs.items()] if self.verbose:", "a command as a function in order to make sure", "import subprocess import typing as tp from pathlib import Path", "This can be used within optimizers. \"\"\" # TODO add", "will be created can be controlled through the CLEAN_COPY_DIRECTORY environment", "def __repr__(self) -> str: diff = \",\".join(f\"{x}={y}\" for x, y", "and (retcode or self.verbose): print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error =", "to a set of descriptors for the parametrization This can", "outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True) except Exception: # pylint: disable=broad-except", "= True, monoobjective: bool = True, not_manyobjective: bool = True,", "= True, continuous: bool = True, metrizable: bool = True,", "cases) full_command = self.command + [str(x) for x in args]", "line: break outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True) except Exception: #", "flush=True) except Exception: # pylint: disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job", "operation is not supported by the parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory):", "directory as base for the copies. 
\"\"\" assert Path(directory).exists(), \"Directory", "add repr # pylint: disable=too-many-arguments def __init__( self, deterministic: bool", "self.metrizable = metrizable self.ordered = ordered self.monoobjective = monoobjective self.not_manyobjective", "in args] + [\"--{}={}\".format(x, y) for x, y in kwargs.items()]", "\"\"\"Creates a full copy of a directory inside a temporary", "tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env)", "self.verbose: print(outlines[-1], flush=True) except Exception: # pylint: disable=broad-except process.kill() process.wait()", "\"\"\"Provides access to a set of descriptors for the parametrization", "# pylint: disable=too-many-arguments def __init__( self, deterministic: bool = True,", "def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None)", "None if cwd is None else str(cwd) self.env = env", "as process: try: assert process.stdout is not None for line", "supported by the parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore", "stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process: try: assert process.stdout is", "the location where the command must run from Returns -------", "optimizers. \"\"\" # TODO add repr # pylint: disable=too-many-arguments def", "stdout Parameters ---------- command: list command to run, as a", "must be provided as a list\") self.command = command self.verbose", "killed for an unknown reason.\") stderr = process.communicate()[1] # we", "**kwargs: tp.Any) -> str: \"\"\"Call the cammand line with addidional", "processing \"\"\" class CommandFunction: \"\"\"Wraps a command as a function", "created copy path is available through the copyname attribute -", "a function in order to make sure it goes through", "\"\"\" key = \"CLEAN_COPY_DIRECTORY\" @classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str])", "be provided as a list\") self.command = command self.verbose =", "ordered self.monoobjective = monoobjective self.not_manyobjective = not_manyobjective def __and__(self, other:", "__repr__(self) -> str: diff = \",\".join(f\"{x}={y}\" for x, y in", "the function Errors are provided with the internal stderr \"\"\"", "x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\" class NotSupportedError(RuntimeError): \"\"\"This", "\"Descriptors\") -> \"Descriptors\": values = {field: getattr(self, field) & getattr(other,", "descriptors for the parametrization This can be used within optimizers.", "fails, or sent as output of the function Errors are", "str], dir: tp.Optional[tp.Union[Path, str]] = None) -> None: if dir", "are provided with the internal stderr \"\"\" # TODO make", "Path]] = None, env: tp.Optional[tp.Dict[str, str]] = None) -> None:", "can be used as TemporaryDirectory but: - the created copy", "__and__(self, other: \"Descriptors\") -> \"Descriptors\": values = {field: getattr(self, field)", "- the contextmanager returns the clean copy path - the", "from pathlib import Path from nevergrad.common import tools as ngtools", "can be controlled through the CLEAN_COPY_DIRECTORY environment variable \"\"\" key", "variable \"\"\" key = \"CLEAN_COPY_DIRECTORY\" @classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path,", "this source tree. 
import os import sys import shutil import", "-> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable in order for", "copies. \"\"\" assert Path(directory).exists(), \"Directory does not exist\" os.environ[cls.key] =", "The output is a string containing everything that has been", "pylint: disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job got killed for an", "full_command = self.command + [str(x) for x in args] +", "NotSupportedError(RuntimeError): \"\"\"This type of operation is not supported by the", "to stdout Parameters ---------- command: list command to run, as", "(retcode or self.verbose): print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error = subprocess.CalledProcessError(retcode,", "a list\") self.command = command self.verbose = verbose self.cwd =", "for x, y in kwargs.items()] if self.verbose: print(f\"The following command", "Rights Reserved. # # This source code is licensed under", "pathlib import Path from nevergrad.common import tools as ngtools class", "is sent: {full_command}\") outlines: tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE,", "make the following command more robust (probably fails in multiple", "runtime cwd: Path/str path to the location where the command", "env: tp.Optional[tp.Dict[str, str]] = None) -> None: if not isinstance(command,", "keyword arguments will be sent as --{key}={val} The logs are", "\"\"\"This type of operation is not supported by the parameter.", "str]] = None) -> None: if not isinstance(command, list): raise", "and stdout at runtime cwd: Path/str path to the location", "# LICENSE file in the root directory of this source", "\"\"\"Job failed during processing \"\"\" class CommandFunction: \"\"\"Wraps a command", "available through the copyname attribute - the contextmanager returns the", "subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) raise FailedJobError(stderr.decode()) from subprocess_error return stdout", "from Returns ------- str Everything that has been sent to", "print(outlines[-1], flush=True) except Exception: # pylint: disable=broad-except process.kill() process.wait() raise", "verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]] = None, env:", "found in the # LICENSE file in the root directory", "typing as tp from pathlib import Path from nevergrad.common import", "@classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None: \"\"\"Sets the", "repr # pylint: disable=too-many-arguments def __init__( self, deterministic: bool =", "directory where the temporary directory will be created can be", "None) -> None: if not isinstance(command, list): raise TypeError(\"The command", "import shutil import tempfile import subprocess import typing as tp", "= continuous self.metrizable = metrizable self.ordered = ordered self.monoobjective =", "Parameters ---------- command: list command to run, as a list", "/ Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path: super().__enter__() return", "set of descriptors for the parametrization This can be used", "False, cwd: tp.Optional[tp.Union[str, Path]] = None, env: tp.Optional[tp.Dict[str, str]] =", "disable=redefined-builtin def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] =", "the parameter. 
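# --- Illustrative usage sketch (not part of the original module) -----------
# Combining two Descriptors with `&` ANDs each boolean field, so the result
# only keeps the capabilities that both operands support.
#
#     d1 = Descriptors(continuous=False)
#     d2 = Descriptors(deterministic=False)
#     both = d1 & d2
#     assert not both.continuous and not both.deterministic
#     assert both.ordered  # fields left at their defaults remain True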
\"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore \"\"\"Creates a", "metrizable self.ordered = ordered self.monoobjective = monoobjective self.not_manyobjective = not_manyobjective", "command: list command to run, as a list verbose: bool", "a full copy of a directory inside a temporary directory", "job fails, or sent as output of the function Errors", "stdout \"\"\" def __init__(self, command: tp.List[str], verbose: bool = False,", "x in args] + [\"--{}={}\".format(x, y) for x, y in", "\"\"\"Call the cammand line with addidional arguments The keyword arguments", "if retcode: subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) raise FailedJobError(stderr.decode())", "# pylint: disable=redefined-builtin def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path,", "Path(directory).exists(), \"Directory does not exist\" os.environ[cls.key] = str(directory) # pylint:", "function in order to make sure it goes through the", "of this source tree. import os import sys import shutil", "temporary directory This class can be used as TemporaryDirectory but:", "monoobjective: bool = True, not_manyobjective: bool = True, continuous: bool", "def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY", "= \",\".join(f\"{x}={y}\" for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\"", "the temporary directory will be created can be controlled through", "__init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None) ->", "or self.verbose): print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error = subprocess.CalledProcessError(retcode, process.args,", "ngtools class Descriptors: \"\"\"Provides access to a set of descriptors", "\"\"\" assert Path(directory).exists(), \"Directory does not exist\" os.environ[cls.key] = str(directory)", "& getattr(other, field) for field in self.__dict__} return Descriptors(**values) def", "parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore \"\"\"Creates a full", "deterministic self.deterministic_function = deterministic_function self.continuous = continuous self.metrizable = metrizable", "copy path - the directory where the temporary directory will", "as output of the function Errors are provided with the", "from nevergrad.common import tools as ngtools class Descriptors: \"\"\"Provides access", "------- str Everything that has been sent to stdout \"\"\"", "None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname = Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname))", "TODO add repr # pylint: disable=too-many-arguments def __init__( self, deterministic:", "class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore \"\"\"Creates a full copy of", "self.verbose = verbose self.cwd = None if cwd is None", "as a list\") self.command = command self.verbose = verbose self.cwd", "Errors are provided with the internal stderr \"\"\" # TODO", "the job fails, or sent as output of the function", "dir: tp.Optional[tp.Union[Path, str]] = None) -> None: if dir is", "Facebook, Inc. and its affiliates. All Rights Reserved. # #", "file in the root directory of this source tree. 
import", "command is sent: {full_command}\") outlines: tp.List[str] = [] with subprocess.Popen(full_command,", "self.env = env def __call__(self, *args: tp.Any, **kwargs: tp.Any) ->", "process.poll() if stderr and (retcode or self.verbose): print(stderr.decode(), file=sys.stderr) if", "raise TypeError(\"The command must be provided as a list\") self.command", "bool = True, continuous: bool = True, metrizable: bool =", "goes through the pipeline and notify when it is finished.", "for an unknown reason.\") stderr = process.communicate()[1] # we already", "Path/str path to the location where the command must run", "through the pipeline and notify when it is finished. The", "This source code is licensed under the MIT license found", "for line in iter(process.stdout.readline, b''): if not line: break outlines.append(line.decode().strip())", "All Rights Reserved. # # This source code is licensed", "containing everything that has been sent to stdout Parameters ----------", "outlines: tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd,", "as ngtools class Descriptors: \"\"\"Provides access to a set of", "x, y in kwargs.items()] if self.verbose: print(f\"The following command is", "deterministic_function: bool = True, monoobjective: bool = True, not_manyobjective: bool", "dir is None: dir = os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname", "return self.copyname class FailedJobError(RuntimeError): \"\"\"Job failed during processing \"\"\" class", "as a list verbose: bool prints the command and stdout", "break outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True) except Exception: # pylint:", "printed if the job fails, or sent as output of", "metrizable: bool = True, ordered: bool = True, ) ->", "output is a string containing everything that has been sent", "key = \"CLEAN_COPY_DIRECTORY\" @classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) ->", "[\"--{}={}\".format(x, y) for x, y in kwargs.items()] if self.verbose: print(f\"The", "the created copy path is available through the copyname attribute", "print(f\"The following command is sent: {full_command}\") outlines: tp.List[str] = []", "def __init__( self, deterministic: bool = True, deterministic_function: bool =", "licensed under the MIT license found in the # LICENSE", "= {field: getattr(self, field) & getattr(other, field) for field in", "a directory inside a temporary directory This class can be", "a string containing everything that has been sent to stdout", "# Copyright (c) Facebook, Inc. and its affiliates. All Rights", "Path from nevergrad.common import tools as ngtools class Descriptors: \"\"\"Provides", "full copy of a directory inside a temporary directory This", "order to make sure it goes through the pipeline and", "the cammand line with addidional arguments The keyword arguments will", "verbose: bool prints the command and stdout at runtime cwd:", "not isinstance(command, list): raise TypeError(\"The command must be provided as", "--{key}={val} The logs are bufferized. 
They will be printed if", "MIT license found in the # LICENSE file in the", "set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment", "FailedJobError(RuntimeError): \"\"\"Job failed during processing \"\"\" class CommandFunction: \"\"\"Wraps a", "failed during processing \"\"\" class CommandFunction: \"\"\"Wraps a command as", "with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process: try:", "sys import shutil import tempfile import subprocess import typing as", "within optimizers. \"\"\" # TODO add repr # pylint: disable=too-many-arguments", "field in self.__dict__} return Descriptors(**values) def __repr__(self) -> str: diff", "calls to use this directory as base for the copies.", "try: assert process.stdout is not None for line in iter(process.stdout.readline,", "the copies. \"\"\" assert Path(directory).exists(), \"Directory does not exist\" os.environ[cls.key]", "= process.poll() if stderr and (retcode or self.verbose): print(stderr.decode(), file=sys.stderr)", "tools as ngtools class Descriptors: \"\"\"Provides access to a set", "as base for the copies. \"\"\" assert Path(directory).exists(), \"Directory does", "be used within optimizers. \"\"\" # TODO add repr #", "process.stdout is not None for line in iter(process.stdout.readline, b''): if", "self.ordered = ordered self.monoobjective = monoobjective self.not_manyobjective = not_manyobjective def", "else str(cwd) self.env = env def __call__(self, *args: tp.Any, **kwargs:", "can be used within optimizers. \"\"\" # TODO add repr", "[str(x) for x in args] + [\"--{}={}\".format(x, y) for x,", "= not_manyobjective def __and__(self, other: \"Descriptors\") -> \"Descriptors\": values =", "= Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path:", "returns the clean copy path - the directory where the", "self.command + [str(x) for x in args] + [\"--{}={}\".format(x, y)", "tp.Union[Path, str]) -> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable in", "---------- command: list command to run, as a list verbose:", "# pylint: disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job got killed for", "verbose self.cwd = None if cwd is None else str(cwd)", "str]] = None) -> None: if dir is None: dir", "directory This class can be used as TemporaryDirectory but: -", "= monoobjective self.not_manyobjective = not_manyobjective def __and__(self, other: \"Descriptors\") ->", "as TemporaryDirectory but: - the created copy path is available", "diff = \",\".join(f\"{x}={y}\" for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return", "in the # LICENSE file in the root directory of", "self.continuous = continuous self.metrizable = metrizable self.ordered = ordered self.monoobjective", "ignore \"\"\"Creates a full copy of a directory inside a", "None: if dir is None: dir = os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\",", "if dir is None: dir = os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir)", "function Errors are provided with the internal stderr \"\"\" #", "is None: dir = os.environ.get(self.key, None) super().__init__(prefix=\"tmp_clean_copy_\", dir=dir) self.copyname =", "for subsequent calls to use this directory as base for", "class NotSupportedError(RuntimeError): \"\"\"This type of operation is 
not supported by", "of a directory inside a temporary directory This class can", "= None if cwd is None else str(cwd) self.env =", "not line: break outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True) except Exception:", "= True, metrizable: bool = True, ordered: bool = True,", "-> str: diff = \",\".join(f\"{x}={y}\" for x, y in sorted(ngtools.different_from_defaults(instance=self,", "unknown reason.\") stderr = process.communicate()[1] # we already got stdout", "- the directory where the temporary directory will be created", "= self.command + [str(x) for x in args] + [\"--{}={}\".format(x,", "pipeline and notify when it is finished. The output is", "str]) -> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable in order", "for the parametrization This can be used within optimizers. \"\"\"", "import os import sys import shutil import tempfile import subprocess", "\"\"\" # TODO make the following command more robust (probably", "-> \"Descriptors\": values = {field: getattr(self, field) & getattr(other, field)", "list command to run, as a list verbose: bool prints", "of operation is not supported by the parameter. \"\"\" class", "command self.verbose = verbose self.cwd = None if cwd is", "(c) Facebook, Inc. and its affiliates. All Rights Reserved. #", "type: ignore \"\"\"Creates a full copy of a directory inside", "*args: tp.Any, **kwargs: tp.Any) -> str: \"\"\"Call the cammand line", "of the function Errors are provided with the internal stderr", "that has been sent to stdout \"\"\" def __init__(self, command:", "-> None: if dir is None: dir = os.environ.get(self.key, None)", "directory of this source tree. import os import sys import", "for x in args] + [\"--{}={}\".format(x, y) for x, y", "self.cwd = None if cwd is None else str(cwd) self.env", "that has been sent to stdout Parameters ---------- command: list", "self, deterministic: bool = True, deterministic_function: bool = True, monoobjective:", "{field: getattr(self, field) & getattr(other, field) for field in self.__dict__}", "multiple cases) full_command = self.command + [str(x) for x in", "= process.communicate()[1] # we already got stdout stdout = \"\\n\".join(outlines)", "y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\" class NotSupportedError(RuntimeError): \"\"\"This type", "Exception: # pylint: disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job got killed", "-> None: if not isinstance(command, list): raise TypeError(\"The command must", "-> str: \"\"\"Call the cammand line with addidional arguments The", "process.communicate()[1] # we already got stdout stdout = \"\\n\".join(outlines) retcode", "bool = True, not_manyobjective: bool = True, continuous: bool =", "nevergrad.common import tools as ngtools class Descriptors: \"\"\"Provides access to", "values = {field: getattr(self, field) & getattr(other, field) for field", "self.verbose): print(stderr.decode(), file=sys.stderr) if retcode: subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout,", "are bufferized. They will be printed if the job fails,", "self.deterministic = deterministic self.deterministic_function = deterministic_function self.continuous = continuous self.metrizable", "path to the location where the command must run from", "self.__dict__} return Descriptors(**values) def __repr__(self) -> str: diff = \",\".join(f\"{x}={y}\"", "bufferized. 
They will be printed if the job fails, or", "True, monoobjective: bool = True, not_manyobjective: bool = True, continuous:", "root directory of this source tree. import os import sys", "is a string containing everything that has been sent to", "sent to stdout \"\"\" def __init__(self, command: tp.List[str], verbose: bool", "by the parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type: ignore \"\"\"Creates", "not None for line in iter(process.stdout.readline, b''): if not line:", "to the location where the command must run from Returns", "os.environ[cls.key] = str(directory) # pylint: disable=redefined-builtin def __init__(self, source: tp.Union[Path,", "provided as a list\") self.command = command self.verbose = verbose", "import typing as tp from pathlib import Path from nevergrad.common", "for the copies. \"\"\" assert Path(directory).exists(), \"Directory does not exist\"", "and notify when it is finished. The output is a", "disable=broad-except process.kill() process.wait() raise FailedJobError(\"Job got killed for an unknown", "directory: tp.Union[Path, str]) -> None: \"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable", "it is finished. The output is a string containing everything", "FailedJobError(\"Job got killed for an unknown reason.\") stderr = process.communicate()[1]", "the command and stdout at runtime cwd: Path/str path to", "not exist\" os.environ[cls.key] = str(directory) # pylint: disable=redefined-builtin def __init__(self,", "the # LICENSE file in the root directory of this", "# type: ignore \"\"\"Creates a full copy of a directory", "str(self.copyname)) def __enter__(self) -> Path: super().__enter__() return self.copyname class FailedJobError(RuntimeError):", "Descriptors: \"\"\"Provides access to a set of descriptors for the", "if stderr and (retcode or self.verbose): print(stderr.decode(), file=sys.stderr) if retcode:", "\",\".join(f\"{x}={y}\" for x, y in sorted(ngtools.different_from_defaults(instance=self, check_mismatches=True).items())) return f\"{self.__class__.__name__}({diff})\" class", "monoobjective self.not_manyobjective = not_manyobjective def __and__(self, other: \"Descriptors\") -> \"Descriptors\":", "= [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as", "tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]] = None) -> None: if", "in the root directory of this source tree. import os", "continuous: bool = True, metrizable: bool = True, ordered: bool", "command to run, as a list verbose: bool prints the", "\"\"\" # TODO add repr # pylint: disable=too-many-arguments def __init__(", "access to a set of descriptors for the parametrization This", "env def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str: \"\"\"Call", "type of operation is not supported by the parameter. \"\"\"", "subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr) raise FailedJobError(stderr.decode()) from subprocess_error", "through the CLEAN_COPY_DIRECTORY environment variable \"\"\" key = \"CLEAN_COPY_DIRECTORY\" @classmethod", "\"\"\"Sets the CLEAN_COPY_DIRECTORY environment variable in order for subsequent calls", "The logs are bufferized. 
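# --- Illustrative usage sketch (not part of the original module) -----------
# TemporaryDirectoryCopy yields the path of a scratch copy, so mutations made
# inside the block never touch the source directory; "my_project" is a
# hypothetical directory name used for illustration.
#
#     with TemporaryDirectoryCopy("my_project") as copy_path:
#         (copy_path / "notes.txt").write_text("safe to mutate the copy")
#     # the copy and its enclosing temporary directory are removed on exit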
class FailedJobError(RuntimeError):
    """Job failed during processing
    """


class CommandFunction:
    """Wraps a command as a function in order to make sure it goes through the
    pipeline, and to notify when it is finished.
    The output is a string containing everything that has been sent to stdout

    Parameters
    ----------
    command: list
        command to run, as a list
    verbose: bool
        prints the command and stdout at runtime
    cwd: Path/str
        path to the location where the command must run from

    Returns
    -------
    str
       Everything that has been sent to stdout
    """

    def __init__(self, command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]] = None,
                 env: tp.Optional[tp.Dict[str, str]] = None) -> None:
        if not isinstance(command, list):
            raise TypeError("The command must be provided as a list")
        self.command = command
        self.verbose = verbose
        self.cwd = None if cwd is None else str(cwd)
        self.env = env

    def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str:
        """Call the command line with additional arguments
        The keyword arguments will be sent as --{key}={val}
        The logs are buffered. They will be printed if the job fails, or sent as output of the function
        Errors are provided with the internal stderr
        """
        # TODO make the following command more robust (probably fails in multiple cases)
        full_command = self.command + [str(x) for x in args] + ["--{}={}".format(x, y) for x, y in kwargs.items()]
        if self.verbose:
            print(f"The following command is sent: {full_command}")
        outlines: tp.List[str] = []
        with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                              shell=False, cwd=self.cwd, env=self.env) as process:
            try:
                assert process.stdout is not None
                for line in iter(process.stdout.readline, b''):
                    if not line:
                        break
                    outlines.append(line.decode().strip())
                    if self.verbose:
                        print(outlines[-1], flush=True)
            except Exception:  # pylint: disable=broad-except
                process.kill()
                process.wait()
                raise FailedJobError("Job got killed for an unknown reason.")
            stderr = process.communicate()[1]  # we already got stdout
            stdout = "\n".join(outlines)
            retcode = process.poll()
            if stderr and (retcode or self.verbose):
                print(stderr.decode(), file=sys.stderr)
            if retcode:
                subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr)
                raise FailedJobError(stderr.decode()) from subprocess_error
        return stdout
# # This", "copyname attribute - the contextmanager returns the clean copy path", "str(directory) # pylint: disable=redefined-builtin def __init__(self, source: tp.Union[Path, str], dir:", "the CLEAN_COPY_DIRECTORY environment variable \"\"\" key = \"CLEAN_COPY_DIRECTORY\" @classmethod def", "as tp from pathlib import Path from nevergrad.common import tools", "import tools as ngtools class Descriptors: \"\"\"Provides access to a", "b''): if not line: break outlines.append(line.decode().strip()) if self.verbose: print(outlines[-1], flush=True)", "field) for field in self.__dict__} return Descriptors(**values) def __repr__(self) ->", "logs are bufferized. They will be printed if the job", "location where the command must run from Returns ------- str", "arguments will be sent as --{key}={val} The logs are bufferized.", "directory will be created can be controlled through the CLEAN_COPY_DIRECTORY", "dir=dir) self.copyname = Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self)", "os import sys import shutil import tempfile import subprocess import", "does not exist\" os.environ[cls.key] = str(directory) # pylint: disable=redefined-builtin def", "parametrization This can be used within optimizers. \"\"\" # TODO", "__enter__(self) -> Path: super().__enter__() return self.copyname class FailedJobError(RuntimeError): \"\"\"Job failed", "[] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process:", "Path(self.name) / Path(source).name shutil.copytree(str(source), str(self.copyname)) def __enter__(self) -> Path: super().__enter__()", "tree. import os import sys import shutil import tempfile import", "-> Path: super().__enter__() return self.copyname class FailedJobError(RuntimeError): \"\"\"Job failed during", "None: self.deterministic = deterministic self.deterministic_function = deterministic_function self.continuous = continuous", "its affiliates. All Rights Reserved. 
# # This source code", "\"Directory does not exist\" os.environ[cls.key] = str(directory) # pylint: disable=redefined-builtin", "self.command = command self.verbose = verbose self.cwd = None if", "is licensed under the MIT license found in the #", "been sent to stdout \"\"\" def __init__(self, command: tp.List[str], verbose:", "CLEAN_COPY_DIRECTORY environment variable in order for subsequent calls to use", "Everything that has been sent to stdout \"\"\" def __init__(self,", "str: \"\"\"Call the cammand line with addidional arguments The keyword", "tp.Optional[tp.Union[str, Path]] = None, env: tp.Optional[tp.Dict[str, str]] = None) ->", "\"\"\" class CommandFunction: \"\"\"Wraps a command as a function in", "bool = True, ordered: bool = True, ) -> None:", "in multiple cases) full_command = self.command + [str(x) for x", "stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False, cwd=self.cwd, env=self.env) as process: try: assert process.stdout", "tempfile import subprocess import typing as tp from pathlib import", "with addidional arguments The keyword arguments will be sent as", "= command self.verbose = verbose self.cwd = None if cwd", "getattr(self, field) & getattr(other, field) for field in self.__dict__} return", "everything that has been sent to stdout Parameters ---------- command:", "= deterministic_function self.continuous = continuous self.metrizable = metrizable self.ordered =", "bool prints the command and stdout at runtime cwd: Path/str", "be sent as --{key}={val} The logs are bufferized. They will", "attribute - the contextmanager returns the clean copy path -", "- the created copy path is available through the copyname", "with the internal stderr \"\"\" # TODO make the following", "\"Descriptors\": values = {field: getattr(self, field) & getattr(other, field) for", "= None, env: tp.Optional[tp.Dict[str, str]] = None) -> None: if", "{full_command}\") outlines: tp.List[str] = [] with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=False,", "be used as TemporaryDirectory but: - the created copy path", "command more robust (probably fails in multiple cases) full_command =", "directory inside a temporary directory This class can be used", "= \"CLEAN_COPY_DIRECTORY\" @classmethod def set_clean_copy_environment_variable(cls, directory: tp.Union[Path, str]) -> None:", "= None) -> None: if not isinstance(command, list): raise TypeError(\"The", "already got stdout stdout = \"\\n\".join(outlines) retcode = process.poll() if", "line with addidional arguments The keyword arguments will be sent", "affiliates. All Rights Reserved. 
# # This source code is", "bool = True, ) -> None: self.deterministic = deterministic self.deterministic_function", "in iter(process.stdout.readline, b''): if not line: break outlines.append(line.decode().strip()) if self.verbose:", "Descriptors(**values) def __repr__(self) -> str: diff = \",\".join(f\"{x}={y}\" for x,", "make sure it goes through the pipeline and notify when", "Path: super().__enter__() return self.copyname class FailedJobError(RuntimeError): \"\"\"Job failed during processing", "= env def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str:", "\"\\n\".join(outlines) retcode = process.poll() if stderr and (retcode or self.verbose):", "= metrizable self.ordered = ordered self.monoobjective = monoobjective self.not_manyobjective =", "__init__(self, command: tp.List[str], verbose: bool = False, cwd: tp.Optional[tp.Union[str, Path]]", "if cwd is None else str(cwd) self.env = env def", "got killed for an unknown reason.\") stderr = process.communicate()[1] #", "provided with the internal stderr \"\"\" # TODO make the", "if self.verbose: print(outlines[-1], flush=True) except Exception: # pylint: disable=broad-except process.kill()", "self.verbose: print(f\"The following command is sent: {full_command}\") outlines: tp.List[str] =", "stdout stdout = \"\\n\".join(outlines) retcode = process.poll() if stderr and", "class Descriptors: \"\"\"Provides access to a set of descriptors for", "cwd: Path/str path to the location where the command must", "stderr = process.communicate()[1] # we already got stdout stdout =", "the copyname attribute - the contextmanager returns the clean copy", "retcode = process.poll() if stderr and (retcode or self.verbose): print(stderr.decode(),", "cwd is None else str(cwd) self.env = env def __call__(self,", "order for subsequent calls to use this directory as base", "process: try: assert process.stdout is not None for line in", "path is available through the copyname attribute - the contextmanager", "a temporary directory This class can be used as TemporaryDirectory", "TODO make the following command more robust (probably fails in", "field) & getattr(other, field) for field in self.__dict__} return Descriptors(**values)", "during processing \"\"\" class CommandFunction: \"\"\"Wraps a command as a", "been sent to stdout Parameters ---------- command: list command to", "= \"\\n\".join(outlines) retcode = process.poll() if stderr and (retcode or", "pylint: disable=redefined-builtin def __init__(self, source: tp.Union[Path, str], dir: tp.Optional[tp.Union[Path, str]]", "not supported by the parameter. \"\"\" class TemporaryDirectoryCopy(tempfile.TemporaryDirectory): # type:", "controlled through the CLEAN_COPY_DIRECTORY environment variable \"\"\" key = \"CLEAN_COPY_DIRECTORY\"" ]
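# Hedged usage sketch (not part of the module above): how CommandFunction and
# TemporaryDirectoryCopy are typically driven. The `echo` command assumes a
# POSIX system, and "./some_dir" is a hypothetical path.
def _demo_command_function() -> None:
    echo = CommandFunction(["echo"], verbose=False)
    # positional args are appended to the command; kwargs become --key=val flags
    output = echo("hello")
    assert output == "hello"

    # Work on a disposable copy of a directory; it is removed on exit:
    # with TemporaryDirectoryCopy("./some_dir") as copy_path:
    #     print(copy_path)  # path to the temporary clean copy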
# --- paramiko: SFTP upload script ---
import paramiko

ssh = paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname='172.16.58.3', username='ec2-user', password='<PASSWORD>', port=22)

sftp_client = ssh.open_sftp()
#sftp_client.get('/home/ec2-user/paramiko_download.txt','paramiko_downloaded_file.txt')
#sftp_client.chdir("/home/ec2-user")
#print(sftp_client.getcwd())
#sftp_client.get('demo.txt','C:\\Users\\Automation\\Desktop\\download_file.txt')
sftp_client.put("transfer_files.py", '/home/ec2-user/transfer_files.py')
sftp_client.close()
ssh.close()
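# Hedged sketch (not part of the original script): the same upload wrapped in
# try/finally so the SFTP session and SSH connection are always closed, even
# if the transfer raises. Host, credentials, and paths are the placeholders
# used above.
def upload_with_cleanup():
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    client.connect(hostname='172.16.58.3', username='ec2-user',
                   password='<PASSWORD>', port=22)
    try:
        sftp = client.open_sftp()
        try:
            sftp.put("transfer_files.py", '/home/ec2-user/transfer_files.py')
        finally:
            sftp.close()
    finally:
        client.close()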
# --- NimLime: internal debug/profiling helpers ---
# coding=utf-8
"""
Internal tools for NimLime development & testing.
"""
from pprint import pprint

import sublime

try:
    from cProfile import Profile
except ImportError:
    from profile import Profile
from functools import wraps
from pstats import Stats

try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

debug_on = False

if debug_on:
    sublime.message_dialog("NimLime running in debug mode.")


# Debug printer
def print_debug(*args, **kwargs):
    """
    Print when debugging.
    :type args: Any
    :type kwargs: Any
    """
    if debug_on:
        pprint(*args, **kwargs)


# Profiling functions
profiler = Profile()
profiler_running = False


def profile_func(func):
    """
    Decorator which profiles a single function. Call print_profile_data
    to print the collected data.
    :type func: Callable
    :rtype: Callable
    """
    @wraps(func)
    def _profile_wrapper(*args, **kwargs):
        global profiler_running
        if not profiler_running:
            profiler_running = True
            try:
                profiler.enable()
                return func(*args, **kwargs)
            finally:
                profiler.disable()
                profiler_running = False

    return _profile_wrapper


def print_profile_data():
    """
    Print the collected profile data.
    """
    stream = StringIO()
    statistics = Stats(profiler, stream=stream)
    statistics.sort_stats('cumulative')
    statistics.print_stats()
    print(stream.getvalue())
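# Hedged usage sketch (not in the original module): decorate a function with
# profile_func, exercise it, then dump the collected stats. `_slow_function`
# is a hypothetical example; the module itself only runs inside Sublime Text
# because of the `sublime` import above.
@profile_func
def _slow_function():
    total = 0
    for i in range(100000):
        total += i * i
    return total


def _demo_profiling():
    _slow_function()
    print_profile_data()  # prints cumulative-time-sorted stats to stdout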
# --- driftage: Monitor unit tests ---
import orjson
from asynctest import TestCase, Mock, patch
from freezegun import freeze_time

from driftage.monitor import Monitor


class TestMonitor(TestCase):
    def setUp(self):
        self.monitor = Monitor("user_test@local", "<PASSWORD>", "identif")

    def tearDown(self):
        self.monitor.container.stop()

    def test_should_set_identifier_or_agent_name(self):
        self.assertEqual(self.monitor._identifier, "identif")
        monitor = Monitor("user_test2@local", "<PASSWORD>")
        self.assertEqual(monitor._identifier, "user_test2")
        monitor.container.stop()

    @patch("driftage.monitor.WaitMonitorSubscriptions")
    async def test_should_add_subscription_behaviour(self, behaviour_mock):
        self.monitor.add_behaviour = Mock()
        await self.monitor.setup()
        self.monitor.add_behaviour.assert_called_once_with(behaviour_mock())

    @freeze_time("1989-08-12")
    @patch("driftage.monitor.FastNotifyContacts")
    @patch("driftage.monitor.Template")
    def test_should_notify_contacts_on_new_data(self, template_mock, behaviour_mock):
        self.monitor.add_behaviour = Mock()
        self.monitor.collect({"my data": 1})
        self.monitor.add_behaviour.assert_called_once_with(
            behaviour_mock(), template=template_mock.return_value
        )
        template_mock.assert_called_once_with(
            body=str(orjson.dumps({
                "data": {"my data": 1},
                "metadata": {
                    "timestamp": 618883200.0,
                    "identifier": "identif"
                }
            }), "utf-8")
        )

    @freeze_time("1989-08-12")
    @patch("driftage.monitor.FastNotifyContacts")
    @patch("driftage.monitor.Template")
    def test_should_notify_contacts_on_new_data_with_call(self, template_mock, behaviour_mock):
        self.monitor.add_behaviour = Mock()
        self.monitor({"my data": 1})
        self.monitor.add_behaviour.assert_called_once_with(
            behaviour_mock(), template=template_mock.return_value
        )
        template_mock.assert_called_once_with(
            body=str(orjson.dumps({
                "data": {"my data": 1},
                "metadata": {
                    "timestamp": 618883200.0,
                    "identifier": "identif"
                }
            }), "utf-8")
        )
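# Hedged note (not in the original tests): stacked @patch decorators inject
# mocks bottom-up, which is why @patch("driftage.monitor.Template"), the
# innermost decorator, binds to the first mock argument (template_mock) and
# @patch("driftage.monitor.FastNotifyContacts") to the second (behaviour_mock).
# A minimal self-contained illustration with the stdlib json module:
from unittest.mock import patch as _patch


@_patch("json.loads")   # outer decorator -> second mock argument
@_patch("json.dumps")   # inner decorator -> first mock argument
def _decorator_order_demo(dumps_mock, loads_mock):
    import json
    json.dumps({})
    json.loads("{}")
    assert dumps_mock.called and loads_mock.called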
# --- fastarg example: examples/todo_advanced/main.py ---
import fastarg

import commands.todo as todo
import commands.user as user

app = fastarg.Fastarg(description="productivity app", prog="todo")


@app.command()
def hello_world(name: str):
    """hello world"""
    print("hello " + name)


app.add_fastarg(todo.app, name="todo")
app.add_fastarg(user.app, name="user")

if __name__ == "__main__":
    app.run()
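# Hedged sketch (an assumption, not recovered from the dump) of what a
# sub-command module such as commands/todo.py could look like, reusing only
# the fastarg calls seen above (Fastarg, @app.command()); the command name
# and body are hypothetical.
#
# # commands/todo.py
# import fastarg
#
# app = fastarg.Fastarg(description="manage todos", prog="todo")
#
# @app.command()
# def create_todo(title: str):
#     """create a todo"""
#     print("creating todo: " + title)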
# --- aiormq: channel tests ---
import asyncio
import uuid

import pytest
from aiomisc_pytest.pytest_plugin import TCPProxy

import aiormq


async def test_simple(amqp_channel: aiormq.Channel):
    await amqp_channel.basic_qos(prefetch_count=1)
    assert amqp_channel.number

    queue = asyncio.Queue()

    deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)
    consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put)
    await amqp_channel.basic_publish(
        b"foo",
        routing_key=deaclare_ok.queue,
        properties=aiormq.spec.Basic.Properties(message_id="123"),
    )

    message = await queue.get()  # type: DeliveredMessage
    assert message.body == b"foo"

    cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag)
    assert cancel_ok.consumer_tag == consume_ok.consumer_tag
    assert cancel_ok.consumer_tag not in amqp_channel.consumers
    await amqp_channel.queue_delete(deaclare_ok.queue)

    deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)
    await amqp_channel.basic_publish(b"foo bar", routing_key=deaclare_ok.queue)

    message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True)
    assert message.body == b"foo bar"


async def test_blank_body(amqp_channel: aiormq.Channel):
    await amqp_channel.basic_qos(prefetch_count=1)
    assert amqp_channel.number

    queue = asyncio.Queue()

    deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)
    consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put)
    await amqp_channel.basic_publish(
        b"",
        routing_key=deaclare_ok.queue,
        properties=aiormq.spec.Basic.Properties(message_id="123"),
    )

    message = await queue.get()  # type: DeliveredMessage
    assert message.body == b""

    cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag)
    assert cancel_ok.consumer_tag == consume_ok.consumer_tag
    assert cancel_ok.consumer_tag not in amqp_channel.consumers
    await amqp_channel.queue_delete(deaclare_ok.queue)

    deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)
    await amqp_channel.basic_publish(b"foo bar", routing_key=deaclare_ok.queue)

    message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True)
    assert message.body == b"foo bar"


@pytest.mark.no_catch_loop_exceptions
async def test_bad_consumer(amqp_channel: aiormq.Channel, loop):
    channel = amqp_channel  # type: aiormq.Channel
    await channel.basic_qos(prefetch_count=1)

    declare_ok = await channel.queue_declare()

    future = loop.create_future()

    await channel.basic_publish(b"urgent", routing_key=declare_ok.queue)

    consumer_tag = loop.create_future()

    async def bad_consumer(message):
        await channel.basic_cancel(await consumer_tag)
        future.set_result(message)
        raise Exception

    consume_ok = await channel.basic_consume(
        declare_ok.queue, bad_consumer, no_ack=False,
    )

    consumer_tag.set_result(consume_ok.consumer_tag)

    message = await future
    await channel.basic_reject(message.delivery.delivery_tag, requeue=True)
    assert message.body == b"urgent"

    future = loop.create_future()
    await channel.basic_consume(
        declare_ok.queue, future.set_result, no_ack=True,
    )

    message = await future
    assert message.body == b"urgent"


async def test_ack_nack_reject(amqp_channel: aiormq.Channel):
    channel = amqp_channel  # type: aiormq.Channel
    await channel.basic_qos(prefetch_count=1)
    declare_ok = await channel.queue_declare(auto_delete=True)
    queue = asyncio.Queue()

    await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False)

    await channel.basic_publish(b"rejected", routing_key=declare_ok.queue)
    message = await queue.get()
    assert message.body == b"rejected"
    await channel.basic_reject(message.delivery.delivery_tag, requeue=False)

    await channel.basic_publish(b"nacked", routing_key=declare_ok.queue)
    message = await queue.get()
    assert message.body == b"nacked"
    await channel.basic_nack(message.delivery.delivery_tag, requeue=False)

    await channel.basic_publish(b"acked", routing_key=declare_ok.queue)
    message = await queue.get()
    assert message.body == b"acked"
    await channel.basic_ack(message.delivery.delivery_tag)


async def test_confirm_multiple(amqp_channel: aiormq.Channel):
    """
    RabbitMQ has been observed to send confirmations in a strange pattern
    when publishing simultaneously where only some messages are delivered
    to a queue. It sends acks like this 1 2 4 5 (multiple, confirming
    also 3). This test is probably inconsequential without publisher_confirms.
    This is a regression for https://github.com/mosquito/aiormq/issues/10
    """
    channel = amqp_channel  # type: aiormq.Channel
    exchange = uuid.uuid4().hex
    await channel.exchange_declare(exchange, exchange_type="topic")
    try:
        declare_ok = await channel.queue_declare(exclusive=True)
        await channel.queue_bind(
            declare_ok.queue, exchange, routing_key="test.5",
        )

        for i in range(10):
            messages = [
                asyncio.ensure_future(channel.basic_publish(
                    b"test", exchange=exchange, routing_key="test.{}".format(i),
                ))
                for i in range(10)
            ]
            _, pending = await asyncio.wait(messages, timeout=0.2)
            assert not pending, "not all publishes were completed (confirmed)"
            await asyncio.sleep(0.05)
    finally:
        await channel.exchange_delete(exchange)


async def test_exclusive_queue_locked(amqp_connection):
    channel0 = await amqp_connection.channel()
    channel1 = await amqp_connection.channel()

    qname = str(uuid.uuid4())

    await channel0.queue_declare(qname, exclusive=True)

    try:
        await channel0.basic_consume(qname, print, exclusive=True)

        with pytest.raises(aiormq.exceptions.ChannelLockedResource):
            await channel1.queue_declare(qname)
            await channel1.basic_consume(qname, print, exclusive=True)
    finally:
        await channel0.queue_delete(qname)


async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel):
    with pytest.raises(aiormq.exceptions.ChannelClosed):
        await amqp_channel.queue_declare(
            "amq.forbidden_queue_name", auto_delete=True,
        )

    with pytest.raises(aiormq.exceptions.ChannelInvalidStateError):
        await amqp_channel.queue_delete("amq.forbidden_queue_name")


async def test_proxy_connection(proxy_connection, proxy: TCPProxy):
    channel = await proxy_connection.channel()  # type: aiormq.Channel
    await channel.queue_declare(auto_delete=True)


async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy):
    # The source dump is truncated here; only these fragments are recoverable:
    #     for _ in ...
    #         qname = str(uuid.uuid4())
    #         with proxy.slowdown(read_delay=5, write_delay=0):
    #             with pytest.raises(asyncio.TimeoutError):
    #                 await channel.queue_declare(qname, ...)
    ...
amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b\"foo", "no_ack=True) assert message.body == b\"foo bar\" @pytest.mark.no_catch_loop_exceptions async def test_bad_consumer(amqp_channel:", "await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" async def", "proxy_connection.channel() # type: aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0):", "= await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" async", "= await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" @pytest.mark.no_catch_loop_exceptions", "test_ack_nack_reject(amqp_channel: aiormq.Channel): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1)", "== b\"\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag", "requeue=True) assert message.body == b\"urgent\" future = loop.create_future() await channel.basic_consume(", "auto_delete=True, ) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def test_proxy_connection(proxy_connection, proxy:", "await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def test_proxy_connection(proxy_connection, proxy: TCPProxy): channel = await", "# type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue", "TCPProxy): channel = await proxy_connection.channel() # type: aiormq.Channel await channel.queue_declare(auto_delete=True)", "confirmations in a strange pattern when publishing simultaneously where only", "for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel = amqp_channel # type: aiormq.Channel exchange", ")) for i in range(10) ] _, pending = await", "await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await", "queue.put) await amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await", "await channel.basic_reject(message.delivery.delivery_tag, requeue=True) assert message.body == b\"urgent\" future = loop.create_future()", "queue.get() assert message.body == b\"nacked\" await channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"acked\",", "type: aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError):", "a queue. It sends acks like this 1 2 4", "3). 
This test is probably inconsequential without publisher_confirms This is", "pytest from aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async def test_simple(amqp_channel:", "queue.put) await amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await", "b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message = await", "inconsequential without publisher_confirms This is a regression for https://github.com/mosquito/aiormq/issues/10 \"\"\"", "with pytest.raises(aiormq.exceptions.ChannelClosed): await amqp_channel.queue_declare( \"amq.forbidden_queue_name\", auto_delete=True, ) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await", "await proxy_connection.channel() # type: aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5,", "some messages are delivered to a queue. It sends acks", "async def test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel = amqp_channel # type:", "channel0 = await amqp_connection.channel() channel1 = await amqp_connection.channel() qname =", "message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\"", "amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue,", "= [ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange, routing_key=\"test.{}\".format(i), )) for i in", "asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0 = await", "await amqp_connection.channel() channel1 = await amqp_connection.channel() qname = str(uuid.uuid4()) await", "routing_key=\"test.5\", ) for i in range(10): messages = [ asyncio.ensure_future(channel.basic_publish(", "queue.get() # type: DeliveredMessage assert message.body == b\"\" cancel_ok =", "b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() # type:", "pending = await asyncio.wait(messages, timeout=0.2) assert not pending, \"not all", "\"not all publishes were completed (confirmed)\" await asyncio.sleep(0.05) finally: await", "await channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue) message = await queue.get()", "future = loop.create_future() await channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag = loop.create_future() async", "await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message = await queue.get() assert message.body ==", "with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def test_proxy_connection(proxy_connection, proxy: TCPProxy): channel", "in a strange pattern when publishing simultaneously where only some", "await queue.get() assert message.body == b\"acked\" await channel.basic_ack(message.delivery.delivery_tag) async def", "channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel: aiormq.Channel): \"\"\" RabbitMQ has been observed", "bad_consumer, no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag) message = await future await 
channel.basic_reject(message.delivery.delivery_tag,", "publisher_confirms This is a regression for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel =", "uuid import pytest from aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async", "test_simple(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok", "sends acks like this 1 2 4 5(multiple, confirming also", "where only some messages are delivered to a queue. It", "range(3): channel = await proxy_connection.channel() # type: aiormq.Channel qname =", "= await queue.get() assert message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False)", "It sends acks like this 1 2 4 5(multiple, confirming", "amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message =", "await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with pytest.raises(aiormq.exceptions.ChannelClosed): await amqp_channel.queue_declare(", "assert message.body == b\"foo bar\" async def test_blank_body(amqp_channel: aiormq.Channel): await", "aiormq.Channel): \"\"\" RabbitMQ has been observed to send confirmations in", "has been observed to send confirmations in a strange pattern", "# type: aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with", "aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue = asyncio.Queue()", "bar\" @pytest.mark.no_catch_loop_exceptions async def test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel = amqp_channel", "DeliveredMessage assert message.body == b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert", "message.body == b\"acked\" await channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel: aiormq.Channel): \"\"\"", "== b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag", "await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await channel.basic_publish(b\"rejected\", routing_key=declare_ok.queue) message = await", "amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message =", "loop): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok", "= uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok = await channel.queue_declare(exclusive=True)", "simultaneously where only some messages are delivered to a queue.", "amqp_channel.queue_declare( \"amq.forbidden_queue_name\", auto_delete=True, ) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def", "routing_key=declare_ok.queue) message = await queue.get() assert message.body == b\"rejected\" await", "finally: await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with pytest.raises(aiormq.exceptions.ChannelClosed): await", "test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel = amqp_channel # type: 
aiormq.Channel await", "properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() # type: DeliveredMessage assert", "= await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b\"foo bar\", routing_key=deaclare_ok.queue) message = await", "== b\"urgent\" async def test_ack_nack_reject(amqp_channel: aiormq.Channel): channel = amqp_channel #", "amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers", "type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare() future =", "amqp_channel # type: aiormq.Channel exchange = uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\")", "await channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag) message = await", "amqp_connection.channel() qname = str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname,", "aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok =", "channel.queue_bind( declare_ok.queue, exchange, routing_key=\"test.5\", ) for i in range(10): messages", "message = await queue.get() # type: DeliveredMessage assert message.body ==", "== b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message =", "await channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True) finally: await channel0.queue_delete(qname) async", "message.body == b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag ==", "aiormq.Channel, loop): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1)", "aiormq.Channel qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await", "b\"urgent\" future = loop.create_future() await channel.basic_consume( declare_ok.queue, future.set_result, no_ack=True, )", "channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message = await queue.get() assert", "amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)", "pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True) finally: await channel0.queue_delete(qname)", ") consumer_tag.set_result(consume_ok.consumer_tag) message = await future await channel.basic_reject(message.delivery.delivery_tag, requeue=True) assert", "timeout=0.2) assert not pending, \"not all publishes were completed (confirmed)\"", "= await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False)", "= await channel.queue_declare() future = loop.create_future() await channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag", "aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async def test_simple(amqp_channel: aiormq.Channel): await", "deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await 
amqp_channel.basic_publish(b\"foo bar\", routing_key=deaclare_ok.queue) message =", "no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag) message = await future await channel.basic_reject(message.delivery.delivery_tag, requeue=True)", "channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag) message = await future", "type: aiormq.Channel exchange = uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok", "loop.create_future() await channel.basic_consume( declare_ok.queue, future.set_result, no_ack=True, ) message = await", "declare_ok.queue, exchange, routing_key=\"test.5\", ) for i in range(10): messages =", "exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True) finally:", "test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok", ") with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def test_proxy_connection(proxy_connection, proxy: TCPProxy):", "channel = await proxy_connection.channel() # type: aiormq.Channel qname = str(uuid.uuid4())", "== b\"foo bar\" @pytest.mark.no_catch_loop_exceptions async def test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel", "# type: aiormq.Channel await channel.queue_declare(auto_delete=True) async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy):", "without publisher_confirms This is a regression for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel", "observed to send confirmations in a strange pattern when publishing", "proxy: TCPProxy): channel = await proxy_connection.channel() # type: aiormq.Channel await", "channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with pytest.raises(aiormq.exceptions.ChannelClosed): await amqp_channel.queue_declare( \"amq.forbidden_queue_name\",", "message.body == b\"urgent\" future = loop.create_future() await channel.basic_consume( declare_ok.queue, future.set_result,", "declare_ok = await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put,", "delivered to a queue. 
It sends acks like this 1", "in range(3): channel = await proxy_connection.channel() # type: aiormq.Channel qname", "future.set_result, no_ack=True, ) message = await future assert message.body ==", "def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue()", "message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message", "assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers await", "amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" async def test_blank_body(amqp_channel:", "TCPProxy): for _ in range(3): channel = await proxy_connection.channel() #", "raise Exception consume_ok = await channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False, )", "pattern when publishing simultaneously where only some messages are delivered", "from aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async def test_simple(amqp_channel: aiormq.Channel):", "# type: DeliveredMessage assert message.body == b\"\" cancel_ok = await", "b\"urgent\" async def test_ack_nack_reject(amqp_channel: aiormq.Channel): channel = amqp_channel # type:", "= await proxy_connection.channel() # type: aiormq.Channel await channel.queue_declare(auto_delete=True) async def", "routing_key=\"test.{}\".format(i), )) for i in range(10) ] _, pending =", "amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() #", "await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b\"foo bar\", routing_key=deaclare_ok.queue)", "amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b\"foo bar\",", "= loop.create_future() await channel.basic_consume( declare_ok.queue, future.set_result, no_ack=True, ) message =", "assert message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue)", "queue.get() assert message.body == b\"acked\" await channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel:", "future.set_result(message) raise Exception consume_ok = await channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False,", "https://github.com/mosquito/aiormq/issues/10 \"\"\" channel = amqp_channel # type: aiormq.Channel exchange =", "exclusive=True) try: await channel0.basic_consume(qname, print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname)", "\"\"\" RabbitMQ has been observed to send confirmations in a", "to send confirmations in a strange pattern when publishing simultaneously", "str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname, print, exclusive=True) with", "amqp_channel.queue_declare(auto_delete=True) await amqp_channel.basic_publish(b\"foo bar\", routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True)", "assert message.body == b\"nacked\" await 
channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue)", "message.body == b\"foo bar\" async def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1)", "channel.queue_declare() future = loop.create_future() await channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag = loop.create_future()", "await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"\",", "for _ in range(3): channel = await proxy_connection.channel() # type:", "uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok = await channel.queue_declare(exclusive=True) await", "message = await future await channel.basic_reject(message.delivery.delivery_tag, requeue=True) assert message.body ==", "been observed to send confirmations in a strange pattern when", "regression for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel = amqp_channel # type: aiormq.Channel", "= asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue,", "type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue =", "only some messages are delivered to a queue. It sends", "queue = asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) consume_ok = await", "aiormq.Channel await channel.queue_declare(auto_delete=True) async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _", "await queue.get() assert message.body == b\"nacked\" await channel.basic_nack(message.delivery.delivery_tag, requeue=False) await", "print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True)", "send confirmations in a strange pattern when publishing simultaneously where", "await channel.basic_consume( declare_ok.queue, future.set_result, no_ack=True, ) message = await future", "channel.basic_publish(b\"rejected\", routing_key=declare_ok.queue) message = await queue.get() assert message.body == b\"rejected\"", "Exception consume_ok = await channel.basic_consume( declare_ok.queue, bad_consumer, no_ack=False, ) consumer_tag.set_result(consume_ok.consumer_tag)", "type: aiormq.Channel await channel.queue_declare(auto_delete=True) async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for", "RabbitMQ has been observed to send confirmations in a strange", "await amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get()", "async def test_ack_nack_reject(amqp_channel: aiormq.Channel): channel = amqp_channel # type: aiormq.Channel", "== b\"foo bar\" async def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert", "await channel1.basic_consume(qname, print, exclusive=True) finally: await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel:", "qname = str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await channel.queue_declare(", "await channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok = await 
channel.queue_declare(exclusive=True) await channel.queue_bind(", "bar\", routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body ==", "[ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange, routing_key=\"test.{}\".format(i), )) for i in range(10)", "channel = await proxy_connection.channel() # type: aiormq.Channel await channel.queue_declare(auto_delete=True) async", "amqp_connection.channel() channel1 = await amqp_connection.channel() qname = str(uuid.uuid4()) await channel0.queue_declare(qname,", "consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok =", "channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue) message = await queue.get() assert", "consumer_tag = loop.create_future() async def bad_consumer(message): await channel.basic_cancel(await consumer_tag) future.set_result(message)", "amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" @pytest.mark.no_catch_loop_exceptions async def", "import TCPProxy import aiormq async def test_simple(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1)", "_ in range(3): channel = await proxy_connection.channel() # type: aiormq.Channel", "publishes were completed (confirmed)\" await asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange) async", "requeue=False) await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue) message = await queue.get() assert message.body", "await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await", "await channel.queue_declare(exclusive=True) await channel.queue_bind( declare_ok.queue, exchange, routing_key=\"test.5\", ) for i", "= await proxy_connection.channel() # type: aiormq.Channel qname = str(uuid.uuid4()) with", "str(uuid.uuid4()) with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await channel.queue_declare( qname, auto_delete=True,", "== b\"acked\" await channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel: aiormq.Channel): \"\"\" RabbitMQ", "i in range(10): messages = [ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange, routing_key=\"test.{}\".format(i),", "async def test_simple(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue =", "to a queue. 
It sends acks like this 1 2", "def test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel() channel1 = await amqp_connection.channel()", "asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await channel.basic_publish(b\"rejected\", routing_key=declare_ok.queue) message =", "channel1 = await amqp_connection.channel() qname = str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True)", "is probably inconsequential without publisher_confirms This is a regression for", "channel.queue_declare(exclusive=True) await channel.queue_bind( declare_ok.queue, exchange, routing_key=\"test.5\", ) for i in", ") message = await queue.get() # type: DeliveredMessage assert message.body", "await channel.basic_ack(message.delivery.delivery_tag) async def test_confirm_multiple(amqp_channel: aiormq.Channel): \"\"\" RabbitMQ has been", "a regression for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel = amqp_channel # type:", "routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() # type: DeliveredMessage", "is a regression for https://github.com/mosquito/aiormq/issues/10 \"\"\" channel = amqp_channel #", "b\"nacked\" await channel.basic_nack(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue) message = await", "message = await queue.get() assert message.body == b\"acked\" await channel.basic_ack(message.delivery.delivery_tag)", "b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert", "def bad_consumer(message): await channel.basic_cancel(await consumer_tag) future.set_result(message) raise Exception consume_ok =", "await channel.queue_bind( declare_ok.queue, exchange, routing_key=\"test.5\", ) for i in range(10):", "await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message = await queue.get()", "confirming also 3). 
This test is probably inconsequential without publisher_confirms", "messages = [ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange, routing_key=\"test.{}\".format(i), )) for i", "acks like this 1 2 4 5(multiple, confirming also 3).", "= str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname, print, exclusive=True)", "test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel() channel1 = await amqp_connection.channel() qname", "a strange pattern when publishing simultaneously where only some messages", "await channel.basic_publish(b\"acked\", routing_key=declare_ok.queue) message = await queue.get() assert message.body ==", "proxy: TCPProxy): for _ in range(3): channel = await proxy_connection.channel()", "loop.create_future() async def bad_consumer(message): await channel.basic_cancel(await consumer_tag) future.set_result(message) raise Exception", "channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await channel.basic_publish(b\"rejected\", routing_key=declare_ok.queue) message = await queue.get()", "\"\"\" channel = amqp_channel # type: aiormq.Channel exchange = uuid.uuid4().hex", "proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await channel.queue_declare( qname, auto_delete=True, timeout=0.5 )", "assert not pending, \"not all publishes were completed (confirmed)\" await", "await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel() channel1", "bar\" async def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue", "channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag = loop.create_future() async def bad_consumer(message): await channel.basic_cancel(await", "type: DeliveredMessage assert message.body == b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag)", "await channel.queue_declare() future = loop.create_future() await channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag =", "this 1 2 4 5(multiple, confirming also 3). 
This test", "] _, pending = await asyncio.wait(messages, timeout=0.2) assert not pending,", "exclusive=True) finally: await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with pytest.raises(aiormq.exceptions.ChannelClosed):", "= await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not", "= await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), )", ") message = await future assert message.body == b\"urgent\" async", "completed (confirmed)\" await asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection):", "= await asyncio.wait(messages, timeout=0.2) assert not pending, \"not all publishes", "for i in range(10): messages = [ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange,", "await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in", "aiormq async def test_simple(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue", "async def test_blank_body(amqp_channel: aiormq.Channel): await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue =", "= await queue.get() # type: DeliveredMessage assert message.body == b\"foo\"", "DeliveredMessage assert message.body == b\"\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert", "type: DeliveredMessage assert message.body == b\"\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag)", "await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname, print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource):", "are delivered to a queue. 
It sends acks like this", "message = await queue.get() assert message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag,", "channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare(auto_delete=True) queue = asyncio.Queue() await channel.basic_consume(declare_ok.queue,", "def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _ in range(3): channel =", "not pending, \"not all publishes were completed (confirmed)\" await asyncio.sleep(0.05)", "test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _ in range(3): channel = await", "= amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok = await", "(confirmed)\" await asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0", "await future await channel.basic_reject(message.delivery.delivery_tag, requeue=True) assert message.body == b\"urgent\" future", "requeue=False) await channel.basic_publish(b\"nacked\", routing_key=declare_ok.queue) message = await queue.get() assert message.body", "finally: await channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel()", "channel1.queue_declare(qname) await channel1.basic_consume(qname, print, exclusive=True) finally: await channel0.queue_delete(qname) async def", "test_proxy_connection(proxy_connection, proxy: TCPProxy): channel = await proxy_connection.channel() # type: aiormq.Channel", "all publishes were completed (confirmed)\" await asyncio.sleep(0.05) finally: await channel.exchange_delete(exchange)", "test is probably inconsequential without publisher_confirms This is a regression", "# type: DeliveredMessage assert message.body == b\"foo\" cancel_ok = await", "await channel.basic_qos(prefetch_count=1) declare_ok = await channel.queue_declare() future = loop.create_future() await", "asyncio.Queue() deaclare_ok = await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put)", "assert message.body == b\"urgent\" future = loop.create_future() await channel.basic_consume( declare_ok.queue,", "when publishing simultaneously where only some messages are delivered to", "def test_bad_consumer(amqp_channel: aiormq.Channel, loop): channel = amqp_channel # type: aiormq.Channel", ") for i in range(10): messages = [ asyncio.ensure_future(channel.basic_publish( b\"test\",", "aiormq.Channel exchange = uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok =", "try: await channel0.basic_consume(qname, print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await", "qname = str(uuid.uuid4()) await channel0.queue_declare(qname, exclusive=True) try: await channel0.basic_consume(qname, print,", "= asyncio.Queue() await channel.basic_consume(declare_ok.queue, queue.put, no_ack=False) await channel.basic_publish(b\"rejected\", routing_key=declare_ok.queue) message", "consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"),", "in range(10): messages = [ asyncio.ensure_future(channel.basic_publish( b\"test\", exchange=exchange, routing_key=\"test.{}\".format(i), ))", "pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await 
amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async def test_proxy_connection(proxy_connection, proxy: TCPProxy): channel =", "1 2 4 5(multiple, confirming also 3). This test is", "routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo", "= loop.create_future() await channel.basic_publish(b\"urgent\", routing_key=declare_ok.queue) consumer_tag = loop.create_future() async def", "cancel_ok.consumer_tag == consume_ok.consumer_tag assert cancel_ok.consumer_tag not in amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue)", "await amqp_channel.queue_declare( \"amq.forbidden_queue_name\", auto_delete=True, ) with pytest.raises(aiormq.exceptions.ChannelInvalidStateError): await amqp_channel.queue_delete(\"amq.forbidden_queue_name\") async", "channel0.basic_consume(qname, print, exclusive=True) with pytest.raises(aiormq.exceptions.ChannelLockedResource): await channel1.queue_declare(qname) await channel1.basic_consume(qname, print,", "5(multiple, confirming also 3). This test is probably inconsequential without", "aiormq.Channel): channel = amqp_channel # type: aiormq.Channel await channel.basic_qos(prefetch_count=1) declare_ok", "consumer_tag) future.set_result(message) raise Exception consume_ok = await channel.basic_consume( declare_ok.queue, bad_consumer,", "await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body == b\"foo bar\" @pytest.mark.no_catch_loop_exceptions async", "exchange_type=\"topic\") try: declare_ok = await channel.queue_declare(exclusive=True) await channel.queue_bind( declare_ok.queue, exchange,", "assert message.body == b\"\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag", "= await queue.get() # type: DeliveredMessage assert message.body == b\"\"", "b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() # type:", "channel.exchange_delete(exchange) async def test_exclusive_queue_locked(amqp_connection): channel0 = await amqp_connection.channel() channel1 =", "channel.queue_declare(auto_delete=True) async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _ in range(3):", "with proxy.slowdown(read_delay=5, write_delay=0): with pytest.raises(asyncio.TimeoutError): await channel.queue_declare( qname, auto_delete=True, timeout=0.5", "_, pending = await asyncio.wait(messages, timeout=0.2) assert not pending, \"not", "await amqp_channel.basic_qos(prefetch_count=1) assert amqp_channel.number queue = asyncio.Queue() deaclare_ok = await", "# type: aiormq.Channel exchange = uuid.uuid4().hex await channel.exchange_declare(exchange, exchange_type=\"topic\") try:", "assert message.body == b\"foo\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag", "await queue.get() # type: DeliveredMessage assert message.body == b\"foo\" cancel_ok", "exchange=exchange, routing_key=\"test.{}\".format(i), )) for i in range(10) ] _, pending", "async def test_declare_queue_timeout(proxy_connection, proxy: TCPProxy): for _ in range(3): channel", "channel = amqp_channel # type: aiormq.Channel exchange = uuid.uuid4().hex await", "await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message", "exchange = uuid.uuid4().hex await 
channel.exchange_declare(exchange, exchange_type=\"topic\") try: declare_ok = await", "This test is probably inconsequential without publisher_confirms This is a", "await queue.get() assert message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await", "cancel_ok.consumer_tag not in amqp_channel.consumers await amqp_channel.queue_delete(deaclare_ok.queue) deaclare_ok = await amqp_channel.queue_declare(auto_delete=True)", "import pytest from aiomisc_pytest.pytest_plugin import TCPProxy import aiormq async def", "await amqp_channel.queue_declare(auto_delete=True) consume_ok = await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"foo\",", "amqp_channel.basic_publish(b\"foo bar\", routing_key=deaclare_ok.queue) message = await amqp_channel.basic_get(deaclare_ok.queue, no_ack=True) assert message.body", "publishing simultaneously where only some messages are delivered to a", "declare_ok = await channel.queue_declare(exclusive=True) await channel.queue_bind( declare_ok.queue, exchange, routing_key=\"test.5\", )", "message.body == b\"\" cancel_ok = await amqp_channel.basic_cancel(consume_ok.consumer_tag) assert cancel_ok.consumer_tag ==", "queue.get() assert message.body == b\"rejected\" await channel.basic_reject(message.delivery.delivery_tag, requeue=False) await channel.basic_publish(b\"nacked\",", "= await amqp_channel.basic_consume(deaclare_ok.queue, queue.put) await amqp_channel.basic_publish( b\"\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), )", "print, exclusive=True) finally: await channel0.queue_delete(qname) async def test_remove_writer_when_closed(amqp_channel: aiormq.Channel): with", "amqp_channel.basic_publish( b\"foo\", routing_key=deaclare_ok.queue, properties=aiormq.spec.Basic.Properties(message_id=\"123\"), ) message = await queue.get() #" ]
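The fixtures above hand each test a ready-made channel; outside pytest the same round trip can be driven directly. A minimal sketch, assuming a RabbitMQ broker reachable at amqp://guest:guest@localhost/ (the URL and payload are illustrative and not part of the test suite):

# Minimal standalone sketch: publish to an auto-delete queue and read the
# message back, mirroring the basic_publish/basic_get pattern of test_simple.
import asyncio

import aiormq


async def main():
    connection = await aiormq.connect("amqp://guest:guest@localhost/")
    channel = await connection.channel()

    declare_ok = await channel.queue_declare(auto_delete=True)

    # aiormq channels use publisher confirms by default, so this publish
    # only returns once the broker has confirmed (and thus routed) the
    # message, which is why the immediate basic_get below can see it.
    await channel.basic_publish(b"hello", routing_key=declare_ok.queue)

    message = await channel.basic_get(declare_ok.queue, no_ack=True)
    assert message.body == b"hello"

    await connection.close()


asyncio.run(main())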
[ "a string, determine whether or not the parentheses are balanced", "''' if str is None: return True open_count = 0", "True open_count = 0 for char in str: if char", "or not the parentheses are balanced ''' def balanced_parens(str): '''", "parentheses are balanced ''' def balanced_parens(str): ''' runtime: O(n) space", "0 for char in str: if char == '(': open_count", "+= 1 elif char == ')': open_count -= 1 if", "1 elif char == ')': open_count -= 1 if open_count", ": O(1) ''' if str is None: return True open_count", "== ')': open_count -= 1 if open_count < 0: return", "return True open_count = 0 for char in str: if", "determine whether or not the parentheses are balanced ''' def", "char == '(': open_count += 1 elif char == ')':", "whether or not the parentheses are balanced ''' def balanced_parens(str):", "O(1) ''' if str is None: return True open_count =", "char == ')': open_count -= 1 if open_count < 0:", "description: Given a string, determine whether or not the parentheses", "not the parentheses are balanced ''' def balanced_parens(str): ''' runtime:", "for char in str: if char == '(': open_count +=", "''' runtime: O(n) space : O(1) ''' if str is", "= 0 for char in str: if char == '(':", "open_count += 1 elif char == ')': open_count -= 1", "balanced_parens(str): ''' runtime: O(n) space : O(1) ''' if str", "elif char == ')': open_count -= 1 if open_count <", "char in str: if char == '(': open_count += 1", "string, determine whether or not the parentheses are balanced '''", "if str is None: return True open_count = 0 for", "None: return True open_count = 0 for char in str:", "Given a string, determine whether or not the parentheses are", "open_count = 0 for char in str: if char ==", "space : O(1) ''' if str is None: return True", "'(': open_count += 1 elif char == ')': open_count -=", "the parentheses are balanced ''' def balanced_parens(str): ''' runtime: O(n)", "in str: if char == '(': open_count += 1 elif", "-= 1 if open_count < 0: return False return open_count", "O(n) space : O(1) ''' if str is None: return", "str: if char == '(': open_count += 1 elif char", "if char == '(': open_count += 1 elif char ==", "== '(': open_count += 1 elif char == ')': open_count", "if open_count < 0: return False return open_count == 0", "''' Problem description: Given a string, determine whether or not", "is None: return True open_count = 0 for char in", "are balanced ''' def balanced_parens(str): ''' runtime: O(n) space :", "')': open_count -= 1 if open_count < 0: return False", "Problem description: Given a string, determine whether or not the", "str is None: return True open_count = 0 for char", "balanced ''' def balanced_parens(str): ''' runtime: O(n) space : O(1)", "1 if open_count < 0: return False return open_count ==", "''' def balanced_parens(str): ''' runtime: O(n) space : O(1) '''", "runtime: O(n) space : O(1) ''' if str is None:", "def balanced_parens(str): ''' runtime: O(n) space : O(1) ''' if", "open_count -= 1 if open_count < 0: return False return" ]
[ "event data. Attributes: configuration (str): CCleaner configuration. key_path (str): Windows", "self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if", "WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the", "10:03:14 AM\" # TODO: determine if this is true for", "int(year, 10) month = int(month, 10) day_of_month = int(day_of_month, 10)", "None for registry_value in registry_key.GetValues(): if not registry_value.name or not", "date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name,", "interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time string formatted as: \"MM/DD/YYYY", "mediates interactions between parsers and other components, such as storage", "{1!s}'.format(registry_value.name, value)) if date_time: event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path", "None self.key_path = None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data.", "return None month, day_of_month, year, hours, minutes, seconds, part_of_day =", "self.configuration = None self.key_path = None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update", "from dfdatetime import time_elements as dfdatetime_time_elements from plaso.containers import events", "(App)Other Explorer MRUs [REG_SZ] * (App)Recent Documents [REG_SZ] * (App)Recently", "[REG_SZ] * (App)Recently Typed URLs [REG_SZ] * (App)Run (in Start", "parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format( time_elements_tuple)) return None", "registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time:", "'.join(sorted(configuration)) or None event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time,", "* (App)Other Explorer MRUs [REG_SZ] * (App)Recent Documents [REG_SZ] *", "[A|P]M\", for example \"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ], contains", "dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey", "* WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the windows width", "= 'ccleaner:configuration' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE)", "plaso.containers import events from plaso.containers import time_events from plaso.lib import", "= 'ccleaner:update' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path", "such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key.", "width in number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME", "\"\"\"CCleaner configuration event data. Attributes: configuration (str): CCleaner configuration. key_path", "Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ] *", "example \"07/13/2013 10:03:14 AM\" # TODO: determine if this is", "event data. 
Attributes: key_path (str): Windows Registry key path. \"\"\"", "hours += 12 time_elements_tuple = (year, month, day_of_month, hours, minutes,", "None if part_of_day == 'PM': hours += 12 time_elements_tuple =", "configuration event data. Attributes: configuration (str): CCleaner configuration. key_path (str):", "self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path = None class CCleanerUpdateEventData(events.EventData):", "* (App)Temporary Internet Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ] *", "(ParserMediator): mediates interactions between parsers and other components, such as", "10) month = int(month, 10) day_of_month = int(day_of_month, 10) hours", "the CCleaner Registry key.\"\"\" import re from dfdatetime import time_elements", "[] date_time = None for registry_value in registry_key.GetValues(): if not", "pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner' DATA_FORMAT =", "**kwargs): \"\"\"Extracts events from a Windows Registry key. Args: parser_mediator", "event_data) event_data = CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or None", "value. Returns: dfdatetime_time_elements.TimeElements: date and time value or None if", "value data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match:", "= None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data. Attributes: key_path", "registry_key.GetValues(): if not registry_value.name or not registry_value.data: continue if registry_value.name", "if this is true for other locales. _UPDATE_DATE_TIME_RE = re.compile(", "True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(", "[REG_SZ], contains \"True\" if the cookies should be cleaned; *", "WINDOW_HEIGHT [REG_SZ], contains the windows height in number of pixels;", "registry_value.data: continue if registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value)", "frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time string formatted as:", "date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning(", "UpdateKey [REG_SZ], contains a date and time formatted as: \"MM/DD/YYYY", "self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys", "other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def", "hours = int(hours, 10) minutes = int(minutes, 10) seconds =", "key. 
Args: parser_mediator (ParserMediator): mediates interactions between parsers and other", "between parsers and other components, such as storage and dfvfs.", "formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" # for example \"07/13/2013 10:03:14", "return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported", "minutes, seconds, part_of_day = ( re_match.groups()) try: year = int(year,", "of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner' DATA_FORMAT", "([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the UpdateKey value. Args:", "= [] date_time = None for registry_value in registry_key.GetValues(): if", "other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows", "# Date and time string formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\"", "import winreg_parser from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration", "is true for other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) '", "(dfwinreg.WinRegistryKey): Windows Registry key. \"\"\" configuration = [] date_time =", "event_data.configuration = ' '.join(sorted(configuration)) or None event_data.key_path = registry_key.path event", "* (App)Delete Index.dat files [REG_SZ] * (App)History [REG_SZ] * (App)Last", "[REG_SZ] * (App)History [REG_SZ] * (App)Last Download Location [REG_SZ] *", "and time formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for example \"07/13/2013", "CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data. Attributes: configuration (str): CCleaner configuration.", "[REG_SZ] * (App)Last Download Location [REG_SZ] * (App)Other Explorer MRUs", "= 'CCleaner Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) #", "in registry_key.GetValues(): if not registry_value.name or not registry_value.data: continue if", "not available. \"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value", "CCleaner Keys for NTUSER hive. Known Windows Registry values within", "'invalid UpdateKey date time value: {0!s}'.format( time_elements_tuple)) return None return", "{0!s}'.format(date_time_string)) return None month, day_of_month, year, hours, minutes, seconds, part_of_day", "data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning(", "DATA_TYPE = 'ccleaner:configuration' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__(", "configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data = CCleanerUpdateEventData() event_data.key_path =", "and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey):", "in number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME =", "= dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid", "data. Attributes: configuration (str): CCleaner configuration. 
key_path (str): Windows Registry", "try: year = int(year, 10) month = int(month, 10) day_of_month", "Registry key path. \"\"\" DATA_TYPE = 'ccleaner:configuration' def __init__(self): \"\"\"Initializes", "re_match.groups()) try: year = int(year, 10) month = int(month, 10)", "WINDOW_WIDTH [REG_SZ], contains the windows width in number of pixels;", "seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except", "UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and", "= None self.key_path = None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event", "Attributes: key_path (str): Windows Registry key path. \"\"\" DATA_TYPE =", "int(month, 10) day_of_month = int(day_of_month, 10) hours = int(hours, 10)", "for other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)')", "(App)Run (in Start Menu) [REG_SZ] * (App)Temporary Internet Files [REG_SZ]", "(App)Recent Documents [REG_SZ] * (App)Recently Typed URLs [REG_SZ] * (App)Run", "= registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data) winreg_parser.WinRegistryParser.RegisterPlugin(CCleanerPlugin)", "as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. Returns:", "storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. \"\"\" configuration", "def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration =", "within the CCleaner key: * (App)Cookies [REG_SZ], contains \"True\" if", "events from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates", "* (App)Cookies [REG_SZ], contains \"True\" if the cookies should be", "= None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys for NTUSER", "[REG_SZ], contains a date and time formatted as: \"MM/DD/YYYY hh:mm:ss", "such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value.", "and time string formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" # for", "* WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] *", "this is true for other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9])", "None date_time_string = registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value", "event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( date_time,", "dfdatetime_time_elements from plaso.containers import events from plaso.containers import time_events from", "data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time", "Registry key. \"\"\" configuration = [] date_time = None for", "time_elements as dfdatetime_time_elements from plaso.containers import events from plaso.containers import", "dfdatetime_time_elements.TimeElements: date and time value or None if not available.", "day_of_month = int(day_of_month, 10) hours = int(hours, 10) minutes =", "and time value or None if not available. \"\"\" if", "Windows Registry key. 
Args: parser_mediator (ParserMediator): mediates interactions between parsers", "from plaso.containers import events from plaso.containers import time_events from plaso.lib", "a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions between", "(str): Windows Registry key path. \"\"\" DATA_TYPE = 'ccleaner:update' def", "registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value =", "= int(day_of_month, 10) hours = int(hours, 10) minutes = int(minutes,", "\"\"\"Parser for the CCleaner Registry key.\"\"\" import re from dfdatetime", "definitions from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import interface class", "plaso.lib import definitions from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import", "'ccleaner:configuration' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration", "def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None", "if registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value", "Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and", "minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True", "data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the", "* (App)Run (in Start Menu) [REG_SZ] * (App)Temporary Internet Files", "parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(date_time_string)) return None if", "windows height in number of pixels; * WINDOW_LEFT [REG_SZ] *", "for example \"07/13/2013 10:03:14 AM\" # TODO: determine if this", "month, day_of_month, year, hours, minutes, seconds, part_of_day = ( re_match.groups())", "data: {0!s}'.format(date_time_string)) return None month, day_of_month, year, hours, minutes, seconds,", "value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other", "a date and time formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for", "Registry values within the CCleaner key: * (App)Cookies [REG_SZ], contains", "* WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains", "date time value: {0!s}'.format(date_time_string)) return None if part_of_day == 'PM':", "if the cookies should be cleaned; * (App)Delete Index.dat files", "registry_value): \"\"\"Parses the UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions", "ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(date_time_string)) return None", "from plaso.containers import time_events from plaso.lib import definitions from plaso.parsers", "cookies should be cleaned; * (App)Delete Index.dat files [REG_SZ] *", "year, hours, minutes, seconds, part_of_day = ( re_match.groups()) try: year", "Keys for NTUSER hive. Known Windows Registry values within the", "Returns: dfdatetime_time_elements.TimeElements: date and time value or None if not", "dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. 
Returns: dfdatetime_time_elements.TimeElements: date and", "\"\"\" configuration = [] date_time = None for registry_value in", "\"\"\" DATA_TYPE = 'ccleaner:update' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData,", "number of pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] *", "= int(month, 10) day_of_month = int(day_of_month, 10) hours = int(hours,", "for example \"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ], contains the", "seconds = int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey", "month, day_of_month, hours, minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple)", "time_elements_tuple)) return None return date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs):", "Windows Registry value. Returns: dfdatetime_time_elements.TimeElements: date and time value or", "parsers and other components, such as storage and dfvfs. registry_value", "values within the CCleaner key: * (App)Cookies [REG_SZ], contains \"True\"", "true for other locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9])", "registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type: {0:s}'.format( registry_value.data_type_string)) return", "\"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ], contains the windows height", "CCleaner Registry key.\"\"\" import re from dfdatetime import time_elements as", "event_data = CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or None event_data.key_path", "Index.dat files [REG_SZ] * (App)History [REG_SZ] * (App)Last Download Location", "__init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None", "import re from dfdatetime import time_elements as dfdatetime_time_elements from plaso.containers", "WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH", "_UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator,", "* (App)Recent Documents [REG_SZ] * (App)Recently Typed URLs [REG_SZ] *", "for registry_value in registry_key.GetValues(): if not registry_value.name or not registry_value.data:", "key_path (str): Windows Registry key path. \"\"\" DATA_TYPE = 'ccleaner:configuration'", "registry_key, **kwargs): \"\"\"Extracts events from a Windows Registry key. Args:", "Windows Registry key path. \"\"\" DATA_TYPE = 'ccleaner:configuration' def __init__(self):", "[REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the windows width in number", "key_path (str): Windows Registry key path. 
\"\"\" DATA_TYPE = 'ccleaner:update'", "event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers", "date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time", "event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event, event_data)", "(App)Temporary Internet Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ] * CookiesToSave", "# -*- coding: utf-8 -*- \"\"\"Parser for the CCleaner Registry", "as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for example \"07/13/2013 10:03:14 AM\"; *", "part_of_day == 'PM': hours += 12 time_elements_tuple = (year, month,", "storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. Returns: dfdatetime_time_elements.TimeElements:", "[REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ],", "time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds) try: date_time", "other components, such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows", "\"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type:", "Start Menu) [REG_SZ] * (App)Temporary Internet Files [REG_SZ] * (App)Thumbnail", "components, such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry", "key path. \"\"\" DATA_TYPE = 'ccleaner:update' def __init__(self): \"\"\"Initializes event", "date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts events from a", "+= 12 time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds)", "= CCleanerUpdateEventData() event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE,", "except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(date_time_string))", "self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys for", "= registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return", "time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration))", "= CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or None event_data.key_path =", "def ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts events from a Windows", "the UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions between parsers", "or not registry_value.data: continue if registry_value.name == 'UpdateKey': date_time =", "date_time_string = registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data')", "events from plaso.containers import time_events from plaso.lib import definitions from", "determine if this is true for other locales. 
_UPDATE_DATE_TIME_RE =", "-*- \"\"\"Parser for the CCleaner Registry key.\"\"\" import re from", "* (App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey [REG_SZ],", "\"MM/DD/YYYY hh:mm:ss [A|P]M\" # for example \"07/13/2013 10:03:14 AM\" #", "[REG_SZ], contains the windows height in number of pixels; *", "\"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path", "NTUSER hive. Known Windows Registry values within the CCleaner key:", "parser_mediator (ParserMediator): mediates interactions between parsers and other components, such", "not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type: {0:s}'.format( registry_value.data_type_string))", "day_of_month, hours, minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time", "UpdateKey value data type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string =", "* (App)Recently Typed URLs [REG_SZ] * (App)Run (in Start Menu)", "hh:mm:ss [A|P]M\", for example \"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ],", "hours, minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time =", "hive. Known Windows Registry values within the CCleaner key: *", "Known Windows Registry values within the CCleaner key: * (App)Cookies", "( re_match.groups()) try: year = int(year, 10) month = int(month,", "(App)Recently Typed URLs [REG_SZ] * (App)Run (in Start Menu) [REG_SZ]", "date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string)", "contains \"True\" if the cookies should be cleaned; * (App)Delete", "for NTUSER hive. Known Windows Registry values within the CCleaner", "see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner' DATA_FORMAT = 'CCleaner Registry", "CCleaner configuration. key_path (str): Windows Registry key path. \"\"\" DATA_TYPE", "date and time formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for example", "value data: {0!s}'.format(date_time_string)) return None month, day_of_month, year, hours, minutes,", "components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry", "Download Location [REG_SZ] * (App)Other Explorer MRUs [REG_SZ] * (App)Recent", "Registry value. Returns: dfdatetime_time_elements.TimeElements: date and time value or None", "time value: {0!s}'.format(date_time_string)) return None if part_of_day == 'PM': hours", "[REG_SZ] * (App)Run (in Start Menu) [REG_SZ] * (App)Temporary Internet", "data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path = None", "time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date", "10) hours = int(hours, 10) minutes = int(minutes, 10) seconds", "Windows Registry values within the CCleaner key: * (App)Cookies [REG_SZ],", "plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data. 
Attributes:", "key.\"\"\" import re from dfdatetime import time_elements as dfdatetime_time_elements from", "or None event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN)", "path. \"\"\" DATA_TYPE = 'ccleaner:update' def __init__(self): \"\"\"Initializes event data.\"\"\"", "as dfdatetime_time_elements from plaso.containers import events from plaso.containers import time_events", "registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data", "= int(year, 10) month = int(month, 10) day_of_month = int(day_of_month,", "* (App)History [REG_SZ] * (App)Last Download Location [REG_SZ] * (App)Other", "'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time string formatted as: \"MM/DD/YYYY hh:mm:ss", "from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data.", "data_type=self.DATA_TYPE) self.configuration = None self.key_path = None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner", "Documents [REG_SZ] * (App)Recently Typed URLs [REG_SZ] * (App)Run (in", "\"\"\" DATA_TYPE = 'ccleaner:configuration' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerConfigurationEventData,", "windows width in number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\"", "_ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the UpdateKey value. Args: parser_mediator (ParserMediator):", "self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string))", "for the CCleaner Registry key.\"\"\" import re from dfdatetime import", "None if not available. \"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported", "example \"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ], contains the windows", "if part_of_day == 'PM': hours += 12 time_elements_tuple = (year,", "http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner' DATA_FORMAT = 'CCleaner Registry data'", "int(day_of_month, 10) hours = int(hours, 10) minutes = int(minutes, 10)", "return None date_time_string = registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey", "(in Start Menu) [REG_SZ] * (App)Temporary Internet Files [REG_SZ] *", "contains the windows width in number of pixels; Also see:", "{0!s}'.format(date_time_string)) return None if part_of_day == 'PM': hours += 12", "None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys for NTUSER hive.", "\"True\" if the cookies should be cleaned; * (App)Delete Index.dat", "from a Windows Registry key. Args: parser_mediator (ParserMediator): mediates interactions", "== 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject()", "\"\"\" NAME = 'ccleaner' DATA_FORMAT = 'CCleaner Registry data' FILTERS", "import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data. 
Attributes: configuration", "* WINDOW_WIDTH [REG_SZ], contains the windows width in number of", "registry_value.name or not registry_value.data: continue if registry_value.name == 'UpdateKey': date_time", "formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for example \"07/13/2013 10:03:14 AM\";", "year = int(year, 10) month = int(month, 10) day_of_month =", "date_time: event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent(", "'invalid UpdateKey date time value: {0!s}'.format(date_time_string)) return None if part_of_day", "None month, day_of_month, year, hours, minutes, seconds, part_of_day = (", "10:03:14 AM\"; * WINDOW_HEIGHT [REG_SZ], contains the windows height in", "be cleaned; * (App)Delete Index.dat files [REG_SZ] * (App)History [REG_SZ]", "\"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin):", "if not registry_value.name or not registry_value.data: continue if registry_value.name ==", "import events from plaso.containers import time_events from plaso.lib import definitions", "registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data = CCleanerUpdateEventData() event_data.key_path", "return None return date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts", "coding: utf-8 -*- \"\"\"Parser for the CCleaner Registry key.\"\"\" import", "interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data. Attributes: configuration (str):", "Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner' DATA_FORMAT = 'CCleaner", "parser_mediator, registry_value): \"\"\"Parses the UpdateKey value. Args: parser_mediator (ParserMediator): mediates", "None event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( registry_key.last_written_time, definitions.TIME_DESCRIPTION_WRITTEN) parser_mediator.ProduceEventWithEventData(event,", "[REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains a date", "time_events from plaso.lib import definitions from plaso.parsers import winreg_parser from", "should be cleaned; * (App)Delete Index.dat files [REG_SZ] * (App)History", "(App)Cookies [REG_SZ], contains \"True\" if the cookies should be cleaned;", "if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None re_match", "day_of_month, year, hours, minutes, seconds, part_of_day = ( re_match.groups()) try:", "Registry key. Args: parser_mediator (ParserMediator): mediates interactions between parsers and", "def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the UpdateKey value. Args: parser_mediator", "DATA_FORMAT = 'CCleaner Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')])", "value or None if not available. \"\"\" if not registry_value.DataIsString():", "from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData):", "import time_elements as dfdatetime_time_elements from plaso.containers import events from plaso.containers", "\"\"\"CCleaner update event data. 
Attributes: key_path (str): Windows Registry key", "CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains a date and time", "return None if part_of_day == 'PM': hours += 12 time_elements_tuple", "value: {0!s}'.format( time_elements_tuple)) return None return date_time def ExtractEvents(self, parser_mediator,", "TODO: determine if this is true for other locales. _UPDATE_DATE_TIME_RE", "super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner", "ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts events from a Windows Registry", "* (App)Last Download Location [REG_SZ] * (App)Other Explorer MRUs [REG_SZ]", "registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing", "NAME = 'ccleaner' DATA_FORMAT = 'CCleaner Registry data' FILTERS =", "not registry_value.name or not registry_value.data: continue if registry_value.name == 'UpdateKey':", "the windows height in number of pixels; * WINDOW_LEFT [REG_SZ]", "= ( re_match.groups()) try: year = int(year, 10) month =", "Menu) [REG_SZ] * (App)Temporary Internet Files [REG_SZ] * (App)Thumbnail Cache", "locales. _UPDATE_DATE_TIME_RE = re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self,", "10) seconds = int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid", "return date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts events from", "(App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains", "registry_key (dfwinreg.WinRegistryKey): Windows Registry key. \"\"\" configuration = [] date_time", "MRUs [REG_SZ] * (App)Recent Documents [REG_SZ] * (App)Recently Typed URLs", "'ccleaner' DATA_FORMAT = 'CCleaner Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter(", "-*- coding: utf-8 -*- \"\"\"Parser for the CCleaner Registry key.\"\"\"", "URLs [REG_SZ] * (App)Run (in Start Menu) [REG_SZ] * (App)Temporary", "'ccleaner:update' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path =", "'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}:", "10) day_of_month = int(day_of_month, 10) hours = int(hours, 10) minutes", "key: * (App)Cookies [REG_SZ], contains \"True\" if the cookies should", "plaso.containers import time_events from plaso.lib import definitions from plaso.parsers import", "configuration (str): CCleaner configuration. key_path (str): Windows Registry key path.", "the CCleaner key: * (App)Cookies [REG_SZ], contains \"True\" if the", "if not available. 
\"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey", "hh:mm:ss [A|P]M\" # for example \"07/13/2013 10:03:14 AM\" # TODO:", "value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data =", "except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format( time_elements_tuple))", "(App)Delete Index.dat files [REG_SZ] * (App)History [REG_SZ] * (App)Last Download", "try: date_time = dfdatetime_time_elements.TimeElements( time_elements_tuple=time_elements_tuple) date_time.is_local_time = True except ValueError:", "r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the", "\"07/13/2013 10:03:14 AM\" # TODO: determine if this is true", "if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type: {0:s}'.format(", "dfdatetime import time_elements as dfdatetime_time_elements from plaso.containers import events from", "int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time", "parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if", "not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return None", "Registry key.\"\"\" import re from dfdatetime import time_elements as dfdatetime_time_elements", "' '.join(sorted(configuration)) or None event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent(", "as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" # for example \"07/13/2013 10:03:14 AM\"", "key path. \"\"\" DATA_TYPE = 'ccleaner:configuration' def __init__(self): \"\"\"Initializes event", "re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses", "string formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" # for example \"07/13/2013", "\"MM/DD/YYYY hh:mm:ss [A|P]M\", for example \"07/13/2013 10:03:14 AM\"; * WINDOW_HEIGHT", "data type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject() if", "not registry_value.data: continue if registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator,", "date_time = None for registry_value in registry_key.GetValues(): if not registry_value.name", "value)) if date_time: event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path event", "if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return", "[REG_SZ] * (App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey", "utf-8 -*- \"\"\"Parser for the CCleaner Registry key.\"\"\" import re", "* WINDOW_HEIGHT [REG_SZ], contains the windows height in number of", "date and time value or None if not available. \"\"\"", "seconds, part_of_day = ( re_match.groups()) try: year = int(year, 10)", "10) minutes = int(minutes, 10) seconds = int(seconds, 10) except", "and dfvfs. registry_value (dfwinreg.WinRegistryValue): Windows Registry value. 
Returns: dfdatetime_time_elements.TimeElements: date", "event data.\"\"\" super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path =", "parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return None month, day_of_month,", "winreg_parser from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event", "[REG_SZ] * (App)Other Explorer MRUs [REG_SZ] * (App)Recent Documents [REG_SZ]", "number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html \"\"\" NAME = 'ccleaner'", "{0!s}'.format( time_elements_tuple)) return None return date_time def ExtractEvents(self, parser_mediator, registry_key,", "configuration. key_path (str): Windows Registry key path. \"\"\" DATA_TYPE =", "= 'ccleaner' DATA_FORMAT = 'CCleaner Registry data' FILTERS = frozenset([", "\"\"\"Gathers the CCleaner Keys for NTUSER hive. Known Windows Registry", "super(CCleanerConfigurationEventData, self).__init__( data_type=self.DATA_TYPE) self.configuration = None self.key_path = None class", "update event data. Attributes: key_path (str): Windows Registry key path.", "self.key_path = None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data. Attributes:", "interactions between parsers and other components, such as storage and", "'unsupported UpdateKey value data type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string", "parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or", "the windows width in number of pixels; Also see: http://cheeky4n6monkey.blogspot.com/2012/02/writing-ccleaner-regripper-plugin-part_05.html", "height in number of pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX", "= frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time string formatted", "Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components,", "== 'PM': hours += 12 time_elements_tuple = (year, month, day_of_month,", "[REG_SZ] * UpdateKey [REG_SZ], contains a date and time formatted", "Explorer MRUs [REG_SZ] * (App)Recent Documents [REG_SZ] * (App)Recently Typed", "int(hours, 10) minutes = int(minutes, 10) seconds = int(seconds, 10)", "of pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP", "configuration = [] date_time = None for registry_value in registry_key.GetValues():", "12 time_elements_tuple = (year, month, day_of_month, hours, minutes, seconds) try:", "Typed URLs [REG_SZ] * (App)Run (in Start Menu) [REG_SZ] *", "= registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data)", "= re.compile( r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) ' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value):", "key. 
\"\"\" configuration = [] date_time = None for registry_value", "contains the windows height in number of pixels; * WINDOW_LEFT", "AM\" # TODO: determine if this is true for other", "ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format( time_elements_tuple)) return", "None return date_time def ExtractEvents(self, parser_mediator, registry_key, **kwargs): \"\"\"Extracts events", "available. \"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data", "class CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys for NTUSER hive. Known", "Registry key path. \"\"\" DATA_TYPE = 'ccleaner:update' def __init__(self): \"\"\"Initializes", "Location [REG_SZ] * (App)Other Explorer MRUs [REG_SZ] * (App)Recent Documents", "= self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data:", "continue if registry_value.name == 'UpdateKey': date_time = self._ParseUpdateKeyValue(parser_mediator, registry_value) else:", "re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return None month,", "Date and time string formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" #", "time value: {0!s}'.format( time_elements_tuple)) return None return date_time def ExtractEvents(self,", "or None if not available. \"\"\" if not registry_value.DataIsString(): parser_mediator.ProduceExtractionWarning(", "files [REG_SZ] * (App)History [REG_SZ] * (App)Last Download Location [REG_SZ]", "{0:s}'.format( registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject() if not date_time_string:", "contains a date and time formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\",", "= True except ValueError: parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value:", "type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject() if not", "'unsupported UpdateKey value data: {0!s}'.format(date_time_string)) return None month, day_of_month, year,", "FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date and time string", "time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration", "Internet Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ] * CookiesToSave [REG_SZ]", "the CCleaner Keys for NTUSER hive. Known Windows Registry values", "# for example \"07/13/2013 10:03:14 AM\" # TODO: determine if", "CCleaner key: * (App)Cookies [REG_SZ], contains \"True\" if the cookies", "pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ] * WINDOW_TOP [REG_SZ]", "time value or None if not available. 
\"\"\" if not", "'CCleaner Registry data' FILTERS = frozenset([ interface.WindowsRegistryKeyPathFilter( 'HKEY_CURRENT_USER\\\\Software\\\\Piriform\\\\CCleaner')]) # Date", "if date_time: event_data = CCleanerUpdateEventData() event_data.key_path = registry_key.path event =", "(App)History [REG_SZ] * (App)Last Download Location [REG_SZ] * (App)Other Explorer", "import definitions from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import interface", "Cache [REG_SZ] * CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains a", "registry_value in registry_key.GetValues(): if not registry_value.name or not registry_value.data: continue", "value: {0!s}'.format(date_time_string)) return None if part_of_day == 'PM': hours +=", "UpdateKey date time value: {0!s}'.format(date_time_string)) return None if part_of_day ==", "Windows Registry key. \"\"\" configuration = [] date_time = None", "= int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date", "(year, month, day_of_month, hours, minutes, seconds) try: date_time = dfdatetime_time_elements.TimeElements(", "parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value data type: {0:s}'.format( registry_value.data_type_string)) return None", "None class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data. Attributes: key_path (str):", "not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None re_match =", "date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration =", "dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. \"\"\" configuration = []", "definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData() event_data.configuration = '", "and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. \"\"\" configuration =", "UpdateKey date time value: {0!s}'.format( time_elements_tuple)) return None return date_time", "CCleanerPlugin(interface.WindowsRegistryPlugin): \"\"\"Gathers the CCleaner Keys for NTUSER hive. Known Windows", "[REG_SZ], contains the windows width in number of pixels; Also", "Attributes: configuration (str): CCleaner configuration. key_path (str): Windows Registry key", "r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the UpdateKey value.", "(App)Last Download Location [REG_SZ] * (App)Other Explorer MRUs [REG_SZ] *", "[REG_SZ] * (App)Recent Documents [REG_SZ] * (App)Recently Typed URLs [REG_SZ]", "class CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data. Attributes: key_path (str): Windows", "\"\"\"Parses the UpdateKey value. Args: parser_mediator (ParserMediator): mediates interactions between", "[REG_SZ] * WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the windows", "__init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE) self.key_path = None class", "time formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\", for example \"07/13/2013 10:03:14", "[A|P]M\" # for example \"07/13/2013 10:03:14 AM\" # TODO: determine", "path. 
\"\"\" DATA_TYPE = 'ccleaner:configuration' def __init__(self): \"\"\"Initializes event data.\"\"\"", "event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event,", "* CookiesToSave [REG_SZ] * UpdateKey [REG_SZ], contains a date and", "value data type: {0:s}'.format( registry_value.data_type_string)) return None date_time_string = registry_value.GetDataAsObject()", "= (year, month, day_of_month, hours, minutes, seconds) try: date_time =", "hours, minutes, seconds, part_of_day = ( re_match.groups()) try: year =", "class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner configuration event data. Attributes: configuration (str): CCleaner", "(str): CCleaner configuration. key_path (str): Windows Registry key path. \"\"\"", "registry_value (dfwinreg.WinRegistryValue): Windows Registry value. Returns: dfdatetime_time_elements.TimeElements: date and time", "AM\"; * WINDOW_HEIGHT [REG_SZ], contains the windows height in number", "UpdateKey value data') return None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not", "= registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data = CCleanerUpdateEventData()", "import time_events from plaso.lib import definitions from plaso.parsers import winreg_parser", "cleaned; * (App)Delete Index.dat files [REG_SZ] * (App)History [REG_SZ] *", "minutes = int(minutes, 10) seconds = int(seconds, 10) except (TypeError,", "[REG_SZ] * (App)Temporary Internet Files [REG_SZ] * (App)Thumbnail Cache [REG_SZ]", "WINDOW_TOP [REG_SZ] * WINDOW_WIDTH [REG_SZ], contains the windows width in", "* UpdateKey [REG_SZ], contains a date and time formatted as:", "= ' '.join(sorted(configuration)) or None event_data.key_path = registry_key.path event =", "# TODO: determine if this is true for other locales.", "= int(hours, 10) minutes = int(minutes, 10) seconds = int(seconds,", "CCleanerConfigurationEventData() event_data.configuration = ' '.join(sorted(configuration)) or None event_data.key_path = registry_key.path", "re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey value", "(str): Windows Registry key path. \"\"\" DATA_TYPE = 'ccleaner:configuration' def", "= time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data = CCleanerConfigurationEventData()", "= None for registry_value in registry_key.GetValues(): if not registry_value.name or", "part_of_day = ( re_match.groups()) try: year = int(year, 10) month", "CCleanerUpdateEventData() event_data.key_path = registry_key.path event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone)", "in number of pixels; * WINDOW_LEFT [REG_SZ] * WINDOW_MAX [REG_SZ]", "parsers and other components, such as storage and dfvfs. 
registry_key", "event = time_events.DateTimeValuesEvent( date_time, definitions.TIME_DESCRIPTION_UPDATE, time_zone=parser_mediator.timezone) parser_mediator.ProduceEventWithEventData(event, event_data) event_data =", "10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value:", "None re_match = self._UPDATE_DATE_TIME_RE.match(date_time_string) if not re_match: parser_mediator.ProduceExtractionWarning( 'unsupported UpdateKey", "' r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)') def _ParseUpdateKeyValue(self, parser_mediator, registry_value): \"\"\"Parses the UpdateKey", "re from dfdatetime import time_elements as dfdatetime_time_elements from plaso.containers import", "date time value: {0!s}'.format( time_elements_tuple)) return None return date_time def", "(TypeError, ValueError): parser_mediator.ProduceExtractionWarning( 'invalid UpdateKey date time value: {0!s}'.format(date_time_string)) return", "registry_value.GetDataAsObject() if not date_time_string: parser_mediator.ProduceExtractionWarning('missing UpdateKey value data') return None", "from plaso.lib import definitions from plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins", "= int(minutes, 10) seconds = int(seconds, 10) except (TypeError, ValueError):", "the cookies should be cleaned; * (App)Delete Index.dat files [REG_SZ]", "(dfwinreg.WinRegistryValue): Windows Registry value. Returns: dfdatetime_time_elements.TimeElements: date and time value", "data. Attributes: key_path (str): Windows Registry key path. \"\"\" DATA_TYPE", "int(minutes, 10) seconds = int(seconds, 10) except (TypeError, ValueError): parser_mediator.ProduceExtractionWarning(", "and other components, such as storage and dfvfs. registry_value (dfwinreg.WinRegistryValue):", "time string formatted as: \"MM/DD/YYYY hh:mm:ss [A|P]M\" # for example", "DATA_TYPE = 'ccleaner:update' def __init__(self): \"\"\"Initializes event data.\"\"\" super(CCleanerUpdateEventData, self).__init__(data_type=self.DATA_TYPE)", "month = int(month, 10) day_of_month = int(day_of_month, 10) hours =", "'PM': hours += 12 time_elements_tuple = (year, month, day_of_month, hours,", "UpdateKey value data: {0!s}'.format(date_time_string)) return None month, day_of_month, year, hours,", "parser_mediator, registry_key, **kwargs): \"\"\"Extracts events from a Windows Registry key.", "\"\"\"Extracts events from a Windows Registry key. Args: parser_mediator (ParserMediator):", "else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value)) if date_time: event_data", "= self._ParseUpdateKeyValue(parser_mediator, registry_value) else: value = registry_value.GetDataAsObject() configuration.append('{0:s}: {1!s}'.format(registry_value.name, value))", "plaso.parsers import winreg_parser from plaso.parsers.winreg_plugins import interface class CCleanerConfigurationEventData(events.EventData): \"\"\"CCleaner", "Windows Registry key path. \"\"\" DATA_TYPE = 'ccleaner:update' def __init__(self):", "as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. \"\"\"", "CCleanerUpdateEventData(events.EventData): \"\"\"CCleaner update event data. Attributes: key_path (str): Windows Registry" ]
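# Illustrative sketch (not part of the plaso module above): how the
# _UPDATE_DATE_TIME_RE pattern decomposes a CCleaner UpdateKey string. The
# sample value "07/13/2013 10:03:14 AM" comes from the docstring above; the
# _SKETCH_RE name is local to this sketch.

import re

_SKETCH_RE = re.compile(
    r'([0-9][0-9])/([0-9][0-9])/([0-9][0-9][0-9][0-9]) '
    r'([0-9][0-9]):([0-9][0-9]):([0-9][0-9]) ([A|P]M)')

month, day_of_month, year, hours, minutes, seconds, part_of_day = (
    _SKETCH_RE.match('07/13/2013 10:03:14 AM').groups())
# Yields ('07', '13', '2013', '10', '03', '14', 'AM'); _ParseUpdateKeyValue
# converts these to integers and adds 12 to hours when part_of_day is 'PM'.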
[ "typing import Any, TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod", "further use. \"\"\" raise NotImplementedError() class Void: \"\"\" None is", "one inhabitant (according to eq and hash). \"\"\" @staticmethod def", "\"\"\" def __init__(self) -> None: raise Exception('Cannot instantiate Void') def", "for further use. \"\"\" raise NotImplementedError() class Void: \"\"\" None", "use. \"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) ->", "\"\"\" raise NotImplementedError() class Void: \"\"\" None is the type", "TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) ->", "<reponame>ejconlon/pushpluck from abc import ABCMeta, abstractmethod from dataclasses import dataclass", "it's impossible for `void` to exist in the first place.", "hash). \"\"\" @staticmethod def instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON", "Reset this to a known good state for further use.", "use. \"\"\" raise NotImplementedError() class Void: \"\"\" None is the", "class Void: \"\"\" None is the type with 1 inhabitant,", "to free resources and deny further use. \"\"\" raise NotImplementedError()", "close(self) -> None: \"\"\" Close this to free resources and", "Void') def absurd(self) -> X: \"\"\" This allows you to", "Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None: \"\"\" Close this to", "known good state for further use. \"\"\" raise NotImplementedError() class", "Any, TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self)", "with 1 inhabitant, None. Void is the type with 0", "def absurd(self) -> X: \"\"\" This allows you to trivially", "Void is the type with 0 inhabitants. \"\"\" def __init__(self)", "for `void` to exist in the first place. \"\"\" raise", "raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None: \"\"\"", "type with 0 inhabitants. \"\"\" def __init__(self) -> None: raise", "__init__(self) -> None: raise Exception('Cannot instantiate Void') def absurd(self) ->", "def close(self) -> None: \"\"\" Close this to free resources", "return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception): def __init__(self, value:", "import Any, TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def", "\"\"\" raise Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\" A simple type", "`void.absurd()` since it's impossible for `void` to exist in the", "from abc import ABCMeta, abstractmethod from dataclasses import dataclass from", "\"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None:", "@abstractmethod def close(self) -> None: \"\"\" Close this to free", "def reset(self) -> None: \"\"\" Reset this to a known", "abstractmethod from dataclasses import dataclass from typing import Any, TypeVar", "class Unit: \"\"\" A simple type with one inhabitant (according", "with one inhabitant (according to eq and hash). \"\"\" @staticmethod", "= Unit() class MatchException(Exception): def __init__(self, value: Any) -> None:", "def instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class", "_UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception): def __init__(self, value: Any)", "in the first place. 
\"\"\" raise Exception('Absurd') @dataclass(frozen=True) class Unit:", "since it's impossible for `void` to exist in the first", "\"\"\" None is the type with 1 inhabitant, None. Void", "good state for further use. \"\"\" raise NotImplementedError() class Void:", "type with one inhabitant (according to eq and hash). \"\"\"", "-> None: \"\"\" Close this to free resources and deny", "import ABCMeta, abstractmethod from dataclasses import dataclass from typing import", "and deny further use. \"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod", "by returning `void.absurd()` since it's impossible for `void` to exist", "A simple type with one inhabitant (according to eq and", "a known good state for further use. \"\"\" raise NotImplementedError()", "This allows you to trivially satisfy type checking by returning", "from dataclasses import dataclass from typing import Any, TypeVar X", "ABCMeta, abstractmethod from dataclasses import dataclass from typing import Any,", "exist in the first place. \"\"\" raise Exception('Absurd') @dataclass(frozen=True) class", "instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception):", "resources and deny further use. \"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta):", "reset(self) -> None: \"\"\" Reset this to a known good", "type with 1 inhabitant, None. Void is the type with", "\"\"\" @staticmethod def instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON =", "inhabitants. \"\"\" def __init__(self) -> None: raise Exception('Cannot instantiate Void')", "allows you to trivially satisfy type checking by returning `void.absurd()`", "0 inhabitants. \"\"\" def __init__(self) -> None: raise Exception('Cannot instantiate", "satisfy type checking by returning `void.absurd()` since it's impossible for", "checking by returning `void.absurd()` since it's impossible for `void` to", "Unit: \"\"\" A simple type with one inhabitant (according to", "MatchException(Exception): def __init__(self, value: Any) -> None: super().__init__(f'Failed to match", "class MatchException(Exception): def __init__(self, value: Any) -> None: super().__init__(f'Failed to", "dataclasses import dataclass from typing import Any, TypeVar X =", "`void` to exist in the first place. \"\"\" raise Exception('Absurd')", "Close this to free resources and deny further use. \"\"\"", "inhabitant (according to eq and hash). \"\"\" @staticmethod def instance()", "@dataclass(frozen=True) class Unit: \"\"\" A simple type with one inhabitant", "Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\" A simple type with one", "to a known good state for further use. \"\"\" raise", "impossible for `void` to exist in the first place. \"\"\"", "NotImplementedError() class Void: \"\"\" None is the type with 1", "and hash). \"\"\" @staticmethod def instance() -> 'Unit': return _UNIT_SINGLETON", "None: raise Exception('Cannot instantiate Void') def absurd(self) -> X: \"\"\"", "to trivially satisfy type checking by returning `void.absurd()` since it's", "Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None: \"\"\" Reset this to", "absurd(self) -> X: \"\"\" This allows you to trivially satisfy", "type checking by returning `void.absurd()` since it's impossible for `void`", "from typing import Any, TypeVar X = TypeVar('X') class Closeable(metaclass=ABCMeta):", "to exist in the first place. 
\"\"\" raise Exception('Absurd') @dataclass(frozen=True)", "trivially satisfy type checking by returning `void.absurd()` since it's impossible", "instantiate Void') def absurd(self) -> X: \"\"\" This allows you", "raise Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\" A simple type with", "free resources and deny further use. \"\"\" raise NotImplementedError() class", "X = TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None:", "'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception): def __init__(self,", "first place. \"\"\" raise Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\" A", "\"\"\" This allows you to trivially satisfy type checking by", "NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None: \"\"\" Reset", "(according to eq and hash). \"\"\" @staticmethod def instance() ->", "\"\"\" A simple type with one inhabitant (according to eq", "returning `void.absurd()` since it's impossible for `void` to exist in", "the type with 0 inhabitants. \"\"\" def __init__(self) -> None:", "to eq and hash). \"\"\" @staticmethod def instance() -> 'Unit':", "X: \"\"\" This allows you to trivially satisfy type checking", "further use. \"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self)", "\"\"\" Close this to free resources and deny further use.", "abc import ABCMeta, abstractmethod from dataclasses import dataclass from typing", "Void: \"\"\" None is the type with 1 inhabitant, None.", "place. \"\"\" raise Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\" A simple", "this to free resources and deny further use. \"\"\" raise", "the first place. \"\"\" raise Exception('Absurd') @dataclass(frozen=True) class Unit: \"\"\"", "simple type with one inhabitant (according to eq and hash).", "None: \"\"\" Reset this to a known good state for", "def __init__(self) -> None: raise Exception('Cannot instantiate Void') def absurd(self)", "inhabitant, None. Void is the type with 0 inhabitants. \"\"\"", "is the type with 1 inhabitant, None. Void is the", "1 inhabitant, None. Void is the type with 0 inhabitants.", "the type with 1 inhabitant, None. Void is the type", "@abstractmethod def reset(self) -> None: \"\"\" Reset this to a", "class Resettable(metaclass=ABCMeta): @abstractmethod def reset(self) -> None: \"\"\" Reset this", "None is the type with 1 inhabitant, None. Void is", "TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None: \"\"\" Close", "import dataclass from typing import Any, TypeVar X = TypeVar('X')", "this to a known good state for further use. 
\"\"\"", "= TypeVar('X') class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None: \"\"\"", "raise NotImplementedError() class Void: \"\"\" None is the type with", "dataclass from typing import Any, TypeVar X = TypeVar('X') class", "Exception('Cannot instantiate Void') def absurd(self) -> X: \"\"\" This allows", "\"\"\" Reset this to a known good state for further", "-> X: \"\"\" This allows you to trivially satisfy type", "_UNIT_SINGLETON = Unit() class MatchException(Exception): def __init__(self, value: Any) ->", "def __init__(self, value: Any) -> None: super().__init__(f'Failed to match value:", "-> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit() class MatchException(Exception): def", "Unit() class MatchException(Exception): def __init__(self, value: Any) -> None: super().__init__(f'Failed", "-> None: \"\"\" Reset this to a known good state", "deny further use. \"\"\" raise NotImplementedError() class Resettable(metaclass=ABCMeta): @abstractmethod def", "None: \"\"\" Close this to free resources and deny further", "eq and hash). \"\"\" @staticmethod def instance() -> 'Unit': return", "class Closeable(metaclass=ABCMeta): @abstractmethod def close(self) -> None: \"\"\" Close this", "raise Exception('Cannot instantiate Void') def absurd(self) -> X: \"\"\" This", "state for further use. \"\"\" raise NotImplementedError() class Void: \"\"\"", "with 0 inhabitants. \"\"\" def __init__(self) -> None: raise Exception('Cannot", "None. Void is the type with 0 inhabitants. \"\"\" def", "__init__(self, value: Any) -> None: super().__init__(f'Failed to match value: {value}')", "-> None: raise Exception('Cannot instantiate Void') def absurd(self) -> X:", "@staticmethod def instance() -> 'Unit': return _UNIT_SINGLETON _UNIT_SINGLETON = Unit()", "you to trivially satisfy type checking by returning `void.absurd()` since", "is the type with 0 inhabitants. \"\"\" def __init__(self) ->" ]
[ "@pytest.mark.benchmark( group=\"Cube reading for large area spatial analysis high-res\", timer=time.perf_counter,", "config from test.cube_utils import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1)", "Read spatially # --------------- @pytest.mark.benchmark( group=\"Cube reading for small area", "import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config,", "getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num', 10) class TestDefaultHighRes:", "# --------------- # Read spatially # --------------- @pytest.mark.benchmark( group=\"Cube reading", "test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- #", "# --------------- @pytest.mark.benchmark( group=\"Cube reading for small area spatial analysis", "args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for large area spatial", "# Read temporally # --------------- @pytest.mark.benchmark( group=\"Cube reading for subset", "iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for global temporal analysis high-res\",", "= getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self):", "high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial,", "@pytest.mark.benchmark( group=\"Cube reading for subset temporal analysis high-res\", timer=time.perf_counter, disable_gc=True,", "subset temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self,", "import pytest from test import config from test.cube_utils import CubeUtils", "timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(2160,),", "args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read temporally # ---------------", "test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading", "for large area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False )", "import time import pytest from test import config from test.cube_utils", "'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\",", "warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM)", "ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num', 10)", "benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for", "cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for global", "autouse=True) def cube_default(self): cube_utils = CubeUtils() 
cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320)", "= getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num', 10) class", "warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM)", "spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark,", "warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM)", "test.cube_utils import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM =", "= CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield cube_utils # ---------------", "# --------------- # Read temporally # --------------- @pytest.mark.benchmark( group=\"Cube reading", "benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for global temporal", "import config from test.cube_utils import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num',", ") def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark(", "test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading", "pytest from test import config from test.cube_utils import CubeUtils ITERATIONS_NUM", "iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for large area spatial analysis", "iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read temporally # --------------- @pytest.mark.benchmark(", "large area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def", "small area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def", "analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default):", "timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,),", "temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark,", "getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils", "area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self,", "temporally # --------------- @pytest.mark.benchmark( group=\"Cube reading for subset temporal analysis", "reading for subset temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False )", "analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default):", "@pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils = CubeUtils() 
cube_utils.generate_cube(\"default_high_res\", 46, 2160,", "10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils = CubeUtils()", "for subset temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def", "reading for large area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False", "reading for global temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False )", "rounds=ROUNDS_NUM) # --------------- # Read temporally # --------------- @pytest.mark.benchmark( group=\"Cube", "analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default):", "spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark,", "benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for large area", "rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for large area spatial analysis high-res\",", "for small area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False )", "from test.cube_utils import CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM", "CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield cube_utils # --------------- #", "from test import config from test.cube_utils import CubeUtils ITERATIONS_NUM =", "disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM,", "46, 2160, 4320) yield cube_utils # --------------- # Read spatially", "--------------- # Read spatially # --------------- @pytest.mark.benchmark( group=\"Cube reading for", "group=\"Cube reading for subset temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False", "global temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self,", ") def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) #", "'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils =", "def cube_default(self): cube_utils = CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield", "yield cube_utils # --------------- # Read spatially # --------------- @pytest.mark.benchmark(", "--------------- @pytest.mark.benchmark( group=\"Cube reading for subset temporal analysis high-res\", timer=time.perf_counter,", "disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM,", "1) ROUNDS_NUM = getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True)", "timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,),", "group=\"Cube reading for large area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True,", "cube_utils # 
--------------- # Read spatially # --------------- @pytest.mark.benchmark( group=\"Cube", "4320) yield cube_utils # --------------- # Read spatially # ---------------", "cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for large", "benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read temporally #", "def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube", ") def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark(", "timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,),", "for global temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def", "disable_gc=True, warmup=False ) def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM,", "high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal,", "class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils = CubeUtils() cube_utils.generate_cube(\"default_high_res\",", "@pytest.mark.benchmark( group=\"Cube reading for global temporal analysis high-res\", timer=time.perf_counter, disable_gc=True,", "benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read", "2160, 4320) yield cube_utils # --------------- # Read spatially #", "warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM)", "time import pytest from test import config from test.cube_utils import", "CubeUtils ITERATIONS_NUM = getattr(config, 'iterations_num', 1) ROUNDS_NUM = getattr(config, 'rounds_num',", "cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield cube_utils # --------------- # Read", "rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for global temporal analysis high-res\", timer=time.perf_counter,", "def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube", "spatially # --------------- @pytest.mark.benchmark( group=\"Cube reading for small area spatial", "disable_gc=True, warmup=False ) def test_read_default_high_res_46x2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(2160,), iterations=ITERATIONS_NUM,", "group=\"Cube reading for global temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False", "Read temporally # --------------- @pytest.mark.benchmark( group=\"Cube reading for subset temporal", "test import config from test.cube_utils import 
CubeUtils ITERATIONS_NUM = getattr(config,", "--------------- @pytest.mark.benchmark( group=\"Cube reading for small area spatial analysis high-res\",", "cube_utils = CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield cube_utils #", "reading for small area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False", "analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default):", "high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial,", "area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_135x135(self,", "--------------- # Read temporally # --------------- @pytest.mark.benchmark( group=\"Cube reading for", "benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal, args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for", "TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def cube_default(self): cube_utils = CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46,", "cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # --------------- # Read temporally", "temporal analysis high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark,", "group=\"Cube reading for small area spatial analysis high-res\", timer=time.perf_counter, disable_gc=True,", "high-res\", timer=time.perf_counter, disable_gc=True, warmup=False ) def test_read_default_high_res_46x135x135(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_temporal,", "args=(135,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) @pytest.mark.benchmark( group=\"Cube reading for global temporal analysis", "# Read spatially # --------------- @pytest.mark.benchmark( group=\"Cube reading for small", "cube_default(self): cube_utils = CubeUtils() cube_utils.generate_cube(\"default_high_res\", 46, 2160, 4320) yield cube_utils", "@pytest.mark.benchmark( group=\"Cube reading for small area spatial analysis high-res\", timer=time.perf_counter,", "ROUNDS_NUM = getattr(config, 'rounds_num', 10) class TestDefaultHighRes: @pytest.fixture(scope=\"class\", autouse=True) def", "def test_read_default_high_res_2160x2160(self, benchmark, cube_default): benchmark.pedantic(cube_default.read_spatial, args=(2160,), iterations=ITERATIONS_NUM, rounds=ROUNDS_NUM) # ---------------", "# --------------- @pytest.mark.benchmark( group=\"Cube reading for subset temporal analysis high-res\"," ]
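# The fixture and benchmarks above only exercise three methods of
# CubeUtils. The sketch below is an assumption about test.cube_utils,
# not its real implementation: the class name _CubeUtilsSketch and all
# method bodies are hypothetical and shown only to document the call
# signatures the benchmarks rely on.

class _CubeUtilsSketch:
    def generate_cube(self, name, time_steps, height, width):
        # Assumed: materialize a cube of shape (time_steps, height, width)
        # under `name` so the read benchmarks exercise real I/O.
        ...

    def read_spatial(self, size):
        # Assumed: read a single `size` x `size` spatial tile.
        ...

    def read_temporal(self, size):
        # Assumed: read the full time series over a `size` x `size` area.
        ...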
[ "must be a member of this set. start_index (int, optional):", "open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in range(2): directives = convo.process('When", "(Union[str, Set[str]]): The reply must be a member of this", "responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context,", "for i in range(2): directives = convo.process('When does that open?').directives", "specified reply Args: directives (list[dict[str, dict]]): list of directives returned", "2 assert 'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def", "open?').directives assert_reply(directives, 'Sorry I cannot help you. Please try again.')", "== target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params", "Please try again.') assert_target_dialogue_state(convo, None) @pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests", "True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async def some_handler(context,", "'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass", "assert_reply(directives, templates, *, start_index=0, slots=None): \"\"\"Asserts that the provided directives", "dialogue_state: return True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async", "the specified reply Args: directives (list[dict[str, dict]]): list of directives", "not sure. You haven't told me where you are!\") @pytest.mark.conversation", "convo.process('are there any stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates=\"I'm", "are cleared in one trip from app to mm.\"\"\" convo", "(dict, optional): The slots to fill the templates \"\"\" slots", "len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder): pass assert", "does that open?').directives assert_reply(directives, 'Which store would you like to", "like to know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('When does", "test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async def some_handler(context, responder): pass assert some_handler.flow_state", "convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not get you.", "client action associated with this reply. slots (dict, optional): The", "me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates=\"I'm not sure. 
You haven't told", "assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async", "assert convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that", "or {} if isinstance(templates, str): templates = [templates] texts =", "some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert 'some_flow_handler_2' in", "\"\"\"Tests that the params are cleared in one trip from", "'reply' assert directives[start_index]['payload']['text'] in texts def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state", "index of the first client action associated with this reply.", "associated with this reply. slots (dict, optional): The slots to", "def assert_dialogue_state(dm, dialogue_state): for rule in dm.rules: if rule.dialogue_state ==", "= some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0", "assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass assert", "open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there any stores near", "some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in", "that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives,", "'Sorry I cannot help you. Please try again.') assert_target_dialogue_state(convo, None)", "optional): The index of the first client action associated with", "[templates] texts = set(map(lambda x: x.format(**slots), templates)) assert len(directives) >=", "the params are cleared in one trip from app to", "'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did", "directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not", "pass assert some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in some_handler.all_flows dm", "directives returned by application templates (Union[str, Set[str]]): The reply must", "def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in", "@some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1", "does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow')", "def assert_reply(directives, templates, *, start_index=0, slots=None): \"\"\"Asserts that the provided", "this reply. 
slots (dict, optional): The slots to fill the", "= convo.process('When does that open?').directives assert_reply(directives, 'Which store would you", "assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder):", "would you like to know about?') @pytest.mark.conversation def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path):", "You haven't told me where you are!\") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app,", "rule in dm.rules: if rule.dialogue_state == dialogue_state: return True return", "some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules)", "'some_handler' in some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow')", "@kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow'", "provided directives contain the specified reply Args: directives (list[dict[str, dict]]):", "i in range(2): directives = convo.process('When does that open?').directives assert_reply(directives,", "== dialogue_state: return True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent')", "you. Which store would you like to know about?') @pytest.mark.conversation", "start_index (int, optional): The index of the first client action", "again.') assert_target_dialogue_state(convo, None) @pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the", "slots or {} if isinstance(templates, str): templates = [templates] texts", "test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in one", "assert_reply(directives, 'Sorry I cannot help you. 
Please try again.') assert_target_dialogue_state(convo,", "force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('exit').directives", "= convo.process('exit').directives assert_target_dialogue_state(convo, None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice", "some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context, responder): pass assert", "directives = convo.process('are there any stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow')", "templates)) assert len(directives) >= start_index + 1 assert directives[start_index]['name'] ==", "assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context,", "a nice day.']) def assert_dialogue_state(dm, dialogue_state): for rule in dm.rules:", "does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there any", "set(map(lambda x: x.format(**slots), templates)) assert len(directives) >= start_index + 1", "contain the specified reply Args: directives (list[dict[str, dict]]): list of", "= convo.process('are there any stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives,", "test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in one", "import Conversation def assert_reply(directives, templates, *, start_index=0, slots=None): \"\"\"Asserts that", "range(2): directives = convo.process('When does that open?').directives assert_reply(directives, 'Which store", "app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives =", "this set. start_index (int, optional): The index of the first", "templates=['Bye', 'Goodbye', 'Have a nice day.']) def assert_dialogue_state(dm, dialogue_state): for", "would you like to know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives =", "pass assert len(some_handler.rules) == 2 assert 'some_flow_handler_2' in some_handler.exit_flow_states def", "directives (list[dict[str, dict]]): list of directives returned by application templates", "that open?').directives assert_reply(directives, 'Which store would you like to know", "know about?') @pytest.mark.conversation def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params", "slots=None): \"\"\"Asserts that the provided directives contain the specified reply", "assert_reply(directives, 'Which store would you like to know about?') assert_target_dialogue_state(convo,", "list of directives returned by application templates (Union[str, Set[str]]): The", "@pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared", "from app to mm.\"\"\" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When", "assert_reply(directives, templates='Sorry, I did not get you. 
Which store would", "intent='some_intent') async def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow'", "dict]]): list of directives returned by application templates (Union[str, Set[str]]):", "templates \"\"\" slots = slots or {} if isinstance(templates, str):", "that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('exit').directives assert_target_dialogue_state(convo, None) assert_reply(directives,", "= Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow')", "def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in", "trip from app to mm.\"\"\" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True)", "pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder):", "assert_target_dialogue_state(convo, None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.']) def", "'send_store_hours_flow') assert_reply(directives, templates=\"I'm not sure. You haven't told me where", "in range(2): directives = convo.process('When does that open?').directives assert_reply(directives, 'Which", "len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) async def some_flow_handler_2(context, responder): pass", "Set[str]]): The reply must be a member of this set.", "'send_store_hours_flow') for i in range(2): directives = convo.process('When does that", "convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo,", "== 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder): pass assert len(some_handler.rules)", "directives contain the specified reply Args: directives (list[dict[str, dict]]): list", "pytest from mindmeld.components import Conversation def assert_reply(directives, templates, *, start_index=0,", "reply must be a member of this set. 
start_index (int,", "in texts def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation", "the templates \"\"\" slots = slots or {} if isinstance(templates,", "some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in some_handler.all_flows dm = some_handler.dialogue_manager", "convo.process('When does that open?').directives assert_reply(directives, 'Which store would you like", "assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def", "directives = convo.process('When does that open?').directives assert_reply(directives, 'Which store would", "@pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared", "'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context,", "that the provided directives contain the specified reply Args: directives", "convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the", "assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I", "directives = convo.process('exit').directives assert_target_dialogue_state(convo, None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a", "@some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2',", "assert_target_dialogue_state(convo, None) @pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params", "texts = set(map(lambda x: x.format(**slots), templates)) assert len(directives) >= start_index", "for rule in dm.rules: if rule.dialogue_state == dialogue_state: return True", "by application templates (Union[str, Set[str]]): The reply must be a", "know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('When does that open?').directives", "rule.dialogue_state == dialogue_state: return True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain',", "start_index + 1 assert directives[start_index]['name'] == 'reply' assert directives[start_index]['payload']['text'] in", "some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent')", "mm.\"\"\" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?')", "day.']) def assert_dialogue_state(dm, dialogue_state): for rule in dm.rules: if rule.dialogue_state", "1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) ==", "force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are", "texts def 
assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation def", "optional): The slots to fill the templates \"\"\" slots =", "fill the templates \"\"\" slots = slots or {} if", "def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state == target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app,", "of this set. start_index (int, optional): The index of the", "slots = slots or {} if isinstance(templates, str): templates =", "that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there any stores", "like to know about?') @pytest.mark.conversation def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that", "def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context, responder): pass assert some_handler.flow_state", "from mindmeld.components import Conversation def assert_reply(directives, templates, *, start_index=0, slots=None):", "= [templates] texts = set(map(lambda x: x.format(**slots), templates)) assert len(directives)", "async def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow' assert", "in some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert", "str): templates = [templates] texts = set(map(lambda x: x.format(**slots), templates))", "== 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) async def some_flow_handler_2(context, responder): pass assert", "convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('exit').directives assert_target_dialogue_state(convo,", "near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates=\"I'm not sure. You haven't", "you like to know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('When", "to know about?') @pytest.mark.conversation def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the", "assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not get you. Which", "0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1", "about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('When does that open?').directives assert_reply(directives,", "None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.']) def assert_dialogue_state(dm,", "async def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert", "reply. 
slots (dict, optional): The slots to fill the templates", "assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in range(2): directives = convo.process('When does", "force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('Howdy!').directives", "assert 'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context,", "slots to fill the templates \"\"\" slots = slots or", "target_dialogue_state @pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are", "len(directives) >= start_index + 1 assert directives[start_index]['name'] == 'reply' assert", "assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.']) def assert_dialogue_state(dm, dialogue_state):", "'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def", "= convo.process('When does that open?').directives assert_reply(directives, 'Sorry I cannot help", "some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def", "@async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async def some_handler(context, responder): pass assert some_handler.flow_state ==", "== 'some_handler_flow' assert 'some_handler' in some_handler.all_flows dm = some_handler.dialogue_manager assert_dialogue_state(dm,", "haven't told me where you are!\") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path):", "The slots to fill the templates \"\"\" slots = slots", "you are!\") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params", "params are cleared in one trip from app to mm.\"\"\"", "assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder): pass", "The reply must be a member of this set. start_index", "store would you like to know about?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives", "def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in", "assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there any stores near me?').directives", "intent='some_intent') def some_handler(context, responder): pass assert some_handler.flow_state == 'some_handler_flow' assert", "app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i", "None) @pytest.mark.conversation def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are", "I cannot help you. 
Please try again.') assert_target_dialogue_state(convo, None) @pytest.mark.conversation", "'some_handler_flow') assert len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') async def some_flow_handler(context, responder):", "{} if isinstance(templates, str): templates = [templates] texts = set(map(lambda", "directives[start_index]['payload']['text'] in texts def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state == target_dialogue_state", "slots (dict, optional): The slots to fill the templates \"\"\"", "@some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2", "return True return False def test_dialogue_flow_async(async_kwik_e_mart_app): @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') async def", "kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in one trip", "convo.process('When does that open?').directives assert_reply(directives, 'Sorry I cannot help you.", "def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in", "me where you are!\") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that", "exit_flow=True) def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert", "convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives = convo.process('are there", "Conversation def assert_reply(directives, templates, *, start_index=0, slots=None): \"\"\"Asserts that the", "where you are!\") @pytest.mark.conversation def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the", "1 @some_handler.handle(intent='some_intent_2', exit_flow=True) async def some_flow_handler_2(context, responder): pass assert len(some_handler.rules)", "start_index=0, slots=None): \"\"\"Asserts that the provided directives contain the specified", "did not get you. Which store would you like to", "convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in range(2):", "responder): pass assert some_handler.flow_state == 'some_handler_flow' assert 'some_handler' in some_handler.all_flows", "of the first client action associated with this reply. slots", "@some_handler.handle(intent='some_intent_2', exit_flow=True) async def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) ==", "application templates (Union[str, Set[str]]): The reply must be a member", "to fill the templates \"\"\" slots = slots or {}", "'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not get you. 
Which store", "len(some_handler.rules) == 0 @some_handler.handle(intent='some_intent') def some_flow_handler(context, responder): pass assert len(some_handler.rules)", "directives[start_index]['name'] == 'reply' assert directives[start_index]['payload']['text'] in texts def assert_target_dialogue_state(convo, target_dialogue_state):", "cleared in one trip from app to mm.\"\"\" convo =", "async def some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2',", "(int, optional): The index of the first client action associated", "= convo.process('Howdy!').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates='Sorry, I did not get", "in one trip from app to mm.\"\"\" convo = Conversation(app=async_kwik_e_mart_app,", "'send_store_hours_flow') directives = convo.process('are there any stores near me?').directives assert_target_dialogue_state(convo,", "'Have a nice day.']) def assert_dialogue_state(dm, dialogue_state): for rule in", "templates='Sorry, I did not get you. Which store would you", "returned by application templates (Union[str, Set[str]]): The reply must be", "'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent') def some_handler(context, responder):", "assert len(some_handler.rules) == 2 assert 'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app):", "@pytest.mark.conversation def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared", "some_flow_handler(context, responder): pass assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) async", "import pytest from mindmeld.components import Conversation def assert_reply(directives, templates, *,", "be a member of this set. start_index (int, optional): The", "assert_reply(directives, templates=\"I'm not sure. You haven't told me where you", "Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path, force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') directives", "\"\"\"Asserts that the provided directives contain the specified reply Args:", "I did not get you. 
Which store would you like", "any stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates=\"I'm not sure.", "len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder): pass assert", "in dm.rules: if rule.dialogue_state == dialogue_state: return True return False", "assert directives[start_index]['payload']['text'] in texts def assert_target_dialogue_state(convo, target_dialogue_state): assert convo.params.target_dialogue_state ==", "if rule.dialogue_state == dialogue_state: return True return False def test_dialogue_flow_async(async_kwik_e_mart_app):", "test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in one", "dm = some_handler.dialogue_manager assert_dialogue_state(dm, 'some_handler') assert_dialogue_state(dm, 'some_handler_flow') assert len(some_handler.rules) ==", "convo.process('exit').directives assert_target_dialogue_state(convo, None) assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.'])", "assert len(some_handler.rules) == 1 @some_handler.handle(intent='some_intent_2', exit_flow=True) def some_flow_handler_2(context, responder): pass", "if isinstance(templates, str): templates = [templates] texts = set(map(lambda x:", "stores near me?').directives assert_target_dialogue_state(convo, 'send_store_hours_flow') assert_reply(directives, templates=\"I'm not sure. You", "\"\"\" slots = slots or {} if isinstance(templates, str): templates", "exit_flow=True) async def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2", ">= start_index + 1 assert directives[start_index]['name'] == 'reply' assert directives[start_index]['payload']['text']", "member of this set. start_index (int, optional): The index of", "Args: directives (list[dict[str, dict]]): list of directives returned by application", "test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path): \"\"\"Tests that the params are cleared in one", "dialogue_state): for rule in dm.rules: if rule.dialogue_state == dialogue_state: return", "one trip from app to mm.\"\"\" convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,", "the first client action associated with this reply. slots (dict,", "force_sync=True) convo.process('When does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in", "does that open?') assert_target_dialogue_state(convo, 'send_store_hours_flow') for i in range(2): directives", "templates (Union[str, Set[str]]): The reply must be a member of", "mindmeld.components import Conversation def assert_reply(directives, templates, *, start_index=0, slots=None): \"\"\"Asserts", "templates = [templates] texts = set(map(lambda x: x.format(**slots), templates)) assert", "reply Args: directives (list[dict[str, dict]]): list of directives returned by", "directives = convo.process('When does that open?').directives assert_reply(directives, 'Sorry I cannot", "def some_flow_handler_2(context, responder): pass assert len(some_handler.rules) == 2 assert 'some_flow_handler_2'", "len(some_handler.rules) == 2 assert 'some_flow_handler_2' in some_handler.exit_flow_states def test_dialogue_flow(kwik_e_mart_app): @kwik_e_mart_app.dialogue_flow(domain='some_domain',", "get you. Which store would you like to know about?')", "templates=\"I'm not sure. 
def assert_reply(directives, templates, *, start_index=0, slots=None):
    """Asserts that the provided directives contain the specified reply

    Args:
        directives (list[dict[str, dict]]): list of directives returned by application
        templates (Union[str, Set[str]]): The reply must be a member of this set.
        start_index (int, optional): The index of the first client action
            associated with this reply.
        slots (dict, optional): The slots to fill the templates
    """
    slots = slots or {}
    if isinstance(templates, str):
        templates = [templates]
    texts = set(map(lambda x: x.format(**slots), templates))
    assert len(directives) >= start_index + 1
    assert directives[start_index]['name'] == 'reply'
    assert directives[start_index]['payload']['text'] in texts


def assert_target_dialogue_state(convo, target_dialogue_state):
    assert convo.params.target_dialogue_state == target_dialogue_state


@pytest.mark.conversation
def test_reprocess_handler(async_kwik_e_mart_app, kwik_e_mart_app_path):
    """Tests that the params are cleared in one trip from app to mm."""
    convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,
                         force_sync=True)
    convo.process('When does that open?')
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    directives = convo.process('are there any stores near me?').directives
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    assert_reply(directives,
                 templates="I'm not sure. You haven't told me where you are!")


@pytest.mark.conversation
def test_default_handler(async_kwik_e_mart_app, kwik_e_mart_app_path):
    """Tests that the params are cleared in one trip from app to mm."""
    convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,
                         force_sync=True)
    convo.process('When does that open?')
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    directives = convo.process('Howdy!').directives
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    assert_reply(directives,
                 templates='Sorry, I did not get you. Which store would you like to know about?')


@pytest.mark.conversation
def test_repeated_flow(async_kwik_e_mart_app, kwik_e_mart_app_path):
    """Tests that the params are cleared in one trip from app to mm."""
    convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,
                         force_sync=True)
    convo.process('When does that open?')
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    for i in range(2):
        directives = convo.process('When does that open?').directives
        assert_reply(directives, 'Which store would you like to know about?')
        assert_target_dialogue_state(convo, 'send_store_hours_flow')
    directives = convo.process('When does that open?').directives
    assert_reply(directives, 'Sorry I cannot help you. Please try again.')
    assert_target_dialogue_state(convo, None)


@pytest.mark.conversation
def test_intent_handler_and_exit_flow(async_kwik_e_mart_app, kwik_e_mart_app_path):
    """Tests that an intent handler can exit the flow."""
    convo = Conversation(app=async_kwik_e_mart_app, app_path=kwik_e_mart_app_path,
                         force_sync=True)
    convo.process('When does that open?')
    assert_target_dialogue_state(convo, 'send_store_hours_flow')
    directives = convo.process('exit').directives
    assert_target_dialogue_state(convo, None)
    assert_reply(directives, templates=['Bye', 'Goodbye', 'Have a nice day.'])


def assert_dialogue_state(dm, dialogue_state):
    for rule in dm.rules:
        if rule.dialogue_state == dialogue_state:
            return True
    return False


def test_dialogue_flow_async(async_kwik_e_mart_app):
    @async_kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent')
    async def some_handler(context, responder):
        pass

    assert some_handler.flow_state == 'some_handler_flow'
    assert 'some_handler' in some_handler.all_flows

    dm = some_handler.dialogue_manager
    assert_dialogue_state(dm, 'some_handler')
    assert_dialogue_state(dm, 'some_handler_flow')

    assert len(some_handler.rules) == 0

    @some_handler.handle(intent='some_intent')
    async def some_flow_handler(context, responder):
        pass

    assert len(some_handler.rules) == 1

    @some_handler.handle(intent='some_intent_2', exit_flow=True)
    async def some_flow_handler_2(context, responder):
        pass

    assert len(some_handler.rules) == 2
    assert 'some_flow_handler_2' in some_handler.exit_flow_states


def test_dialogue_flow(kwik_e_mart_app):
    @kwik_e_mart_app.dialogue_flow(domain='some_domain', intent='some_intent')
    def some_handler(context, responder):
        pass

    assert some_handler.flow_state == 'some_handler_flow'
    assert 'some_handler' in some_handler.all_flows

    dm = some_handler.dialogue_manager
    assert_dialogue_state(dm, 'some_handler')
    assert_dialogue_state(dm, 'some_handler_flow')

    assert len(some_handler.rules) == 0

    @some_handler.handle(intent='some_intent')
    def some_flow_handler(context, responder):
        pass

    assert len(some_handler.rules) == 1

    @some_handler.handle(intent='some_intent_2', exit_flow=True)
    def some_flow_handler_2(context, responder):
        pass

    assert len(some_handler.rules) == 2
    assert 'some_flow_handler_2' in some_handler.exit_flow_states
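# The snippet below is a minimal, self-contained sketch (not part of the
# original test suite) showing how the assert_reply helper above resolves a
# parameterized template: slot values are substituted with plain str.format
# before membership is checked. The directive payload and slot values are
# made up for illustration.
directives = [{'name': 'reply',
               'payload': {'text': 'The Elm Street store opens at 9am.'}}]
assert_reply(directives,
             templates='The {store} store opens at {time}.',
             slots={'store': 'Elm Street', 'time': '9am'})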
[ "vmin, vmax = dense.min(), dense.max() if vmin * vmax <", "plt from matplotlib import ticker plt.rcParams['font.family'] = 'monospace' fig =", "matplotlib import ticker plt.rcParams['font.family'] = 'monospace' fig = plt.figure() rect", "cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2])", "Y = np.arange(-2.0, 2.05, 0.05) Xm, Ym = np.meshgrid(X, Y)", "origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm,", "Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e')", "data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1])", "Xm, Ym = np.meshgrid(X, Y) vmin, vmax = dense.min(), dense.max()", "levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6))", "plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1) os.remove('dense.dat') os.remove('data0.dat')", "2.05, 0.05) Y = np.arange(-2.0, 2.05, 0.05) Xm, Ym =", "= plt.figure() rect = fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',')", "vmin = -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax)) cr =", "= np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat',", "levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0], colors='black',", "plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1) os.remove('dense.dat') os.remove('data0.dat') os.remove('data1.dat') webbrowser.open('file://{}'.format(os.path.realpath('{}.svg'.format(sys.argv[1]))))", "from matplotlib import ticker plt.rcParams['font.family'] = 'monospace' fig = plt.figure()", "dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0],", "'monospace' fig = plt.figure() rect = fig.add_subplot(111, aspect='equal') data0 =", "data0 = np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense =", "plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight',", "0.05) Xm, Ym = np.meshgrid(X, Y) vmin, vmax = dense.min(),", "vmax < 0: vmin = -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin,", "0: vmin = -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax)) cr", "rect = fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1 =", "linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb =", "numpy as np import matplotlib matplotlib.use('Agg') import matplotlib.cm as cm", "= dense.min(), dense.max() if vmin * vmax < 0: vmin", "vmin * vmax < 0: vmin = -abs(max(-vmin, vmax)) vmax", "fig = plt.figure() rect = fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat',", "rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower')", "dense.max() if vmin * vmax < 0: vmin = 
-abs(max(-vmin,", "marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\")", "import numpy as np import matplotlib matplotlib.use('Agg') import matplotlib.cm as", "vmax = dense.min(), dense.max() if vmin * vmax < 0:", "as cm import matplotlib.pylab as plt from matplotlib import ticker", "import os,sys import webbrowser import numpy as np import matplotlib", "data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue',", "as np import matplotlib matplotlib.use('Agg') import matplotlib.cm as cm import", "X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID),", "import matplotlib.cm as cm import matplotlib.pylab as plt from matplotlib", "np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0, 2.05, 0.05) Xm, Ym", "vmax = +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1],", "np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X", "if vmin * vmax < 0: vmin = -abs(max(-vmin, vmax))", "+abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]),", "matplotlib matplotlib.use('Agg') import matplotlib.cm as cm import matplotlib.pylab as plt", "facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\")", "plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black',", "plt.rcParams['font.family'] = 'monospace' fig = plt.figure() rect = fig.add_subplot(111, aspect='equal')", "= np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X = np.arange(-2.0, 2.05,", "np.arange(-2.0, 2.05, 0.05) Xm, Ym = np.meshgrid(X, Y) vmin, vmax", "webbrowser import numpy as np import matplotlib matplotlib.use('Agg') import matplotlib.cm", "= -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y),", "np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X = np.arange(-2.0, 2.05, 0.05)", "linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0],", "os,sys import webbrowser import numpy as np import matplotlib matplotlib.use('Agg')", "Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1,", "= np.arange(-2.0, 2.05, 0.05) Xm, Ym = np.meshgrid(X, Y) vmin,", "s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0],", "as plt from matplotlib import ticker plt.rcParams['font.family'] = 'monospace' fig", "cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1)", "= +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0],", "matplotlib.pylab as plt from matplotlib import ticker plt.rcParams['font.family'] = 'monospace'", "ticker plt.rcParams['font.family'] = 'monospace' fig = plt.figure() rect = fig.add_subplot(111,", "dense 
= np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X = np.arange(-2.0,", "colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12)", "fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',')", "edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted')", "plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym,", "= np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0, 2.05, 0.05) Xm,", "import webbrowser import numpy as np import matplotlib matplotlib.use('Agg') import", "np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',')", "Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1)", "s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID),", "X = np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0, 2.05, 0.05)", "plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr,", "= rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm,", "aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense", "= sys.argv[1] X = np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0,", "plt.figure() rect = fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1", "ID = sys.argv[1] X = np.arange(-2.0, 2.05, 0.05) Y =", "< 0: vmin = -abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax))", "extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym,", "rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^',", "np import matplotlib matplotlib.use('Agg') import matplotlib.cm as cm import matplotlib.pylab", "= plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red',", "lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight',", "data1 = np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID =", "2.05, 0.05) Xm, Ym = np.meshgrid(X, Y) vmin, vmax =", "= fig.add_subplot(111, aspect='equal') data0 = np.loadtxt('data0.dat', delimiter=',') data1 = np.loadtxt('data1.dat',", "Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1],", "marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black',", "= np.meshgrid(X, Y) vmin, vmax = dense.min(), dense.max() if vmin", "lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1])", "linestyles='dashed', linewidths=[2,2]) 
plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb", "Y) vmin, vmax = dense.min(), dense.max() if vmin * vmax", "Ym = np.meshgrid(X, Y) vmin, vmax = dense.min(), dense.max() if", "cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax,", "Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense,", "delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1] X =", "= np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID = sys.argv[1]", "cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2])", "vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr,", "vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense, levels=[-1, 1], cmap=cm.bwr, linestyles='dashed',", "cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0],", "delimiter=',') data1 = np.loadtxt('data1.dat', delimiter=',') dense = np.loadtxt('dense.dat', delimiter=',') ID", "sys.argv[1] X = np.arange(-2.0, 2.05, 0.05) Y = np.arange(-2.0, 2.05,", "cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1],", "rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1) plt.xlim(X[0], X[-1]) plt.ylim(Y[0],", "X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm, Ym, dense,", "format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v', facecolor='red', edgecolor='black', s=30,", "import matplotlib.pylab as plt from matplotlib import ticker plt.rcParams['font.family'] =", "1], cmap=cm.bwr, linestyles='dashed', linewidths=[2,2]) plt.contour(Xm, Ym, dense, levels=[0], colors='black', linestyles='dashed',", "plt.xlim(X[0], X[-1]) plt.ylim(Y[0], Y[-1]) plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1)", "vmax)) vmax = +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0],", "dense, levels=[0], colors='black', linestyles='dashed', linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face')", "edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30, lw=1)", "vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin,", "* vmax < 0: vmin = -abs(max(-vmin, vmax)) vmax =", "plt.xlabel(\"\") plt.ylabel(\"\") plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1) os.remove('dense.dat')", "-abs(max(-vmin, vmax)) vmax = +abs(max(-vmin, vmax)) cr = rect.imshow(dense.reshape((len(Y), len(X))),", "= 'monospace' fig = plt.figure() rect = fig.add_subplot(111, aspect='equal') data0", "facecolor='red', edgecolor='black', s=30, lw=1) rect.scatter(data1[:,0], data1[:,1], marker='^', facecolor='blue', edgecolor='black', s=30,", "cm import 
matplotlib.pylab as plt from matplotlib import ticker plt.rcParams['font.family']", "linewidths=[2]) cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1],", "len(X))), extent=(X[0], X[-1], Y[0], Y[-1]), vmin=vmin, vmax=vmax, cmap=cm.coolwarm, origin='lower') plt.contour(Xm,", "matplotlib.cm as cm import matplotlib.pylab as plt from matplotlib import", "matplotlib.use('Agg') import matplotlib.cm as cm import matplotlib.pylab as plt from", "import ticker plt.rcParams['font.family'] = 'monospace' fig = plt.figure() rect =", "0.05) Y = np.arange(-2.0, 2.05, 0.05) Xm, Ym = np.meshgrid(X,", "plt.grid(ls='dotted') plt.savefig('{}.svg'.format(ID), bbox_inches='tight', pad_inches=0.1) plt.savefig('{}.eps'.format(ID), bbox_inches='tight', pad_inches=0.1) os.remove('dense.dat') os.remove('data0.dat') os.remove('data1.dat')", "delimiter=',') ID = sys.argv[1] X = np.arange(-2.0, 2.05, 0.05) Y", "cb = plt.colorbar(cr, format='%+.1e') cb.solids.set_edgecolor('face') cb.set_ticks(ticker.LinearLocator(6)) cb.ax.tick_params(labelsize=12) rect.scatter(data0[:,0], data0[:,1], marker='v',", "import matplotlib matplotlib.use('Agg') import matplotlib.cm as cm import matplotlib.pylab as", "np.meshgrid(X, Y) vmin, vmax = dense.min(), dense.max() if vmin *", "dense.min(), dense.max() if vmin * vmax < 0: vmin =" ]
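# Standalone check (made-up sample values, not part of the original script) of
# the symmetric color-range clamp used above: whenever the plotted field
# changes sign, vmin and vmax are forced to the same magnitude so the
# diverging colormap stays centered on zero.
vmin, vmax = -0.3, 2.0
if vmin * vmax < 0:
    vmin = -abs(max(-vmin, vmax))
    vmax = +abs(max(-vmin, vmax))
assert (vmin, vmax) == (-2.0, 2.0)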
[ "RenderStyle def score_to_docx(score: Score, style: RenderStyle, lang: Language): ret =", "sarna.report_generator.style import RenderStyle def score_to_docx(score: Score, style: RenderStyle, lang: Language):", "sarna.model.enums import Score, Language from sarna.report_generator import make_run from sarna.report_generator.locale_choice", "score.name.lower()), locale_choice(score, lang)) for warn in style._warnings: # TODO: something", "import make_run from sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style import RenderStyle", "Language): ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for warn in", "<gh_stars>10-100 from sarna.model.enums import Score, Language from sarna.report_generator import make_run", "sarna.report_generator import make_run from sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style import", "RenderStyle, lang: Language): ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for", "def score_to_docx(score: Score, style: RenderStyle, lang: Language): ret = make_run(getattr(style,", "from sarna.report_generator import make_run from sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style", "import locale_choice from sarna.report_generator.style import RenderStyle def score_to_docx(score: Score, style:", "make_run from sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style import RenderStyle def", "import RenderStyle def score_to_docx(score: Score, style: RenderStyle, lang: Language): ret", "ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for warn in style._warnings:", "from sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style import RenderStyle def score_to_docx(score:", "lang)) for warn in style._warnings: # TODO: something print(warn) return", "sarna.report_generator.locale_choice import locale_choice from sarna.report_generator.style import RenderStyle def score_to_docx(score: Score,", "for warn in style._warnings: # TODO: something print(warn) return ret", "Score, style: RenderStyle, lang: Language): ret = make_run(getattr(style, score.name.lower()), locale_choice(score,", "import Score, Language from sarna.report_generator import make_run from sarna.report_generator.locale_choice import", "style: RenderStyle, lang: Language): ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang))", "Language from sarna.report_generator import make_run from sarna.report_generator.locale_choice import locale_choice from", "score_to_docx(score: Score, style: RenderStyle, lang: Language): ret = make_run(getattr(style, score.name.lower()),", "locale_choice(score, lang)) for warn in style._warnings: # TODO: something print(warn)", "from sarna.report_generator.style import RenderStyle def score_to_docx(score: Score, style: RenderStyle, lang:", "= make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for warn in style._warnings: #", "lang: Language): ret = make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for warn", "make_run(getattr(style, score.name.lower()), locale_choice(score, lang)) for warn in style._warnings: # TODO:", "Score, Language from sarna.report_generator import make_run from sarna.report_generator.locale_choice import locale_choice", "from sarna.model.enums import Score, Language from sarna.report_generator import make_run from", 
"locale_choice from sarna.report_generator.style import RenderStyle def score_to_docx(score: Score, style: RenderStyle," ]
[ "req['bssid'] resp['payload'] = struct.pack('<HHH', 0, 2, 0) hapd.mgmt_tx(resp) assoc =", "ok: raise Exception(\"connect radio work not completed\") ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"],", "on client polling. dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'],", "assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): \"\"\"AP with open", "BSD license. # See README for more details. import logging", "\"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This", "event\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def test_ap_open_disconnect_in_ps(dev, apdev, params): \"\"\"Disconnect with", "polling. dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected()", "i in range(0, 3): dev[i].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i", "some traffic sa = hapd.own_addr() da = dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1')", "timeout=10) if ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (1)\")", "import os import hostapd import hwsim_utils from tshark import run_tshark", "{} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0') # let the AP send", "func): started = False try: hostapd.add_ap(apdev['ifname'], { \"ssid\": \"open\" })", "aa:bb:cc:dd:ee:ff\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] + \"", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\")", "br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled(dev, apdev): \"\"\"AP with open", "+ ev) if dev[0].get_status_field(\"id_str\") != \"foo\": raise Exception(\"id_str mismatch\") def", "'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev = hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10)", "apdev): \"\"\"AP with open mode and beaconing disabled (2)\"\"\" hapd", "get much useful output with mac80211_hwsim currently, # but run", "with open mode and beaconing disabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "dev[1].request(\"REMOVE_NETWORK all\") dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\",", "\"ssid\": \"open\" }) br_ifname='sta-br0' ifname='wlan5' wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') # First,", "entry(2)\") def test_ap_open_disable_enable(dev, apdev): \"\"\"AP with open mode getting disabled", "'set', 'dev', br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr',", "str(e)) else: raise # Next, add the bridge interface and", "timeout=5) if ev is None: raise Exception(\"No INTERFACE-DISABLED event\") #", "\"start_disabled\": \"1\" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if", "matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_connected(timeout=15)", "dev.connect(\"open\", key_mgmt=\"NONE\", 
scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" % id) if \"connect@\"", "# First, try a failure case of adding an interface", "useful output with mac80211_hwsim currently, # but run through the", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", id_str=\"foo\", wait_connect=False)", "% assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): \"\"\"AP with", "freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise", "= {} resp['fc'] = req['fc'] resp['da'] = req['sa'] resp['sa'] =", "func): with alloc_fail(hapd, count, func): started = False try: hostapd.add_ap(apdev['ifname'],", "action=1100\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could not send", "open mode and beaconing disabled (2)\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "res = dev[0].request(\"BLACKLIST\") if bssid1 in res or bssid2 in", "association\"\"\" ssid = \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "open mode (no security) configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "11: break req = None if not req: raise Exception(\"Authentication", "test_ap_open_ifdown(dev, apdev): \"\"\"AP with open mode and external ifconfig down\"\"\"", "state == 0: state = 1 elif pvb == 0", "3: raise Exception(\"Association Request frames not received: assoc=%d\" % assoc)", "started = True except: pass if started: raise Exception(\"hostapd interface", "kernel warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'],", "new interface can still be added when memory allocation does", "params = { \"ssid\": \"open\", \"ap_max_inactivity\": \"1\" } hapd =", "a kernel bug\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "def test_ap_open_packet_loss(dev, apdev): \"\"\"AP with open mode configuration and large", "hapd.request(\"RELOAD\"): raise Exception(\"RELOAD failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def", "distributed under the terms of the BSD license. 
# See", "send couple of Beacon frames time.sleep(0.3) # disconnect - with", "\"FAIL\" in dev[0].request(cmd): raise Exception(\"Could not send test Action frame\")", "def hapd_out_of_mem(hapd, apdev, count, func): with alloc_fail(hapd, count, func): started", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"bss_load_update_period\": \"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\")", "ok = False for i in range(30): if \"connect@\" not", "in range(0, 10): req = hapd.mgmt_rx() if req is None:", "apdev): \"\"\"AP with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd =", "no matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']])", "bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid) wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') wpas.interface_add(\"wlan5\", drv_params=\"force_connect_cmd=1\")", "br_ifname, 'up']) subprocess.call(['iw', ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif', br_ifname,", "\"open\" }) bssid1 = apdev[0]['bssid'] hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\":", "def test_ap_open_start_disabled(dev, apdev): \"\"\"AP with open mode and beaconing disabled\"\"\"", "will be accepted due to matching network subprocess.call(['iw', 'dev', dev[0].ifname,", "Exception(\"hostapd interface started even with memory allocation failure: \" +", "raise Exception(\"AP did not ack Action frame\") def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev):", "times for i in range(0, 10): hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd)", "unexpected AP\") if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[1] connected to", "scan_freq=\"2412\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].request(\"DISCONNECT\")", "out\") if req['subtype'] == 11: break req = None if", "subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev =", "channel survey update couple of times for i in range(0,", "\"open\" }) bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid) wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')", "hostapd.add_ap(apdev[0]['ifname'], params) for i in range(0, 3): dev[i].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "a new interface can still be added when memory allocation", "Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "README for more details. import logging logger = logging.getLogger() import", "subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down']) ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=10)", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") for i in range(2): hapd.request(\"DISABLE\") dev[0].wait_disconnected()", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", id_str=\"foo\", wait_connect=False) ev", "range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, \"=wpa_driver_nl80211_drv_init\") # eloop_register_read_sock() call from", "the BSD license. # See README for more details. 
import", "'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\",", "to allocation failure\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This will be accepted", "the channel survey update couple of times for i in", "not received: assoc=%d\" % assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev,", "Copyright (c) 2014, Qualcomm Atheros, Inc. # # This software", "for i in range(0, 3): dev[i].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for", "not ack Action frame\") def test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP with open", "interface in a bridge\"\"\" br_ifname='sta-br0' ifname='wlan5' try: _test_ap_open_wpas_in_bridge(dev, apdev) finally:", "'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev):", "False try: hostapd.add_ap(apdev['ifname'], { \"ssid\": \"open\" }) started = True", "\"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in range(0, 10):", "case of adding an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface", "res or bssid2 in res: raise Exception(\"Unexpected blacklist entry(2)\") def", "mode and beaconing disabled (2)\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface addition succeeded unexpectedly\")", "= dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG", "open mode configuration and large packet loss\"\"\" params = {", "!= \"foo\": raise Exception(\"id_str mismatch\") def test_ap_open_select_any(dev, apdev): \"\"\"AP with", "'set', 'dev', br_ifname, 'up']) subprocess.call(['iw', ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl',", "{ \"ssid\": \"open\" }) started = True except: pass if", "hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) def test_bssid_black_white_list(dev, apdev): \"\"\"BSSID black/white", "hwsim_utils.PS_MANUAL_POLL) try: # inject some traffic sa = hapd.own_addr() da", "\"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") for i in", "\" + apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00", "if assoc == 3: break if assoc != 3: raise", "key_mgmt=\"NONE\", scan_freq=\"2412\", id_str=\"foo\", wait_connect=False) ev = dev[0].wait_connected(timeout=10) if \"id_str=foo\" not", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") for", "assoc == 3: break if assoc != 3: raise Exception(\"Association", "test_ap_open_sta_enable_disable(dev, apdev): \"\"\"AP with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd", "}) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") ev = hapd.wait_event([ \"AP-STA-CONNECTED\" ],", "raise Exception(\"CTRL-EVENT-CONNECT did not have matching id_str: \" + ev)", "\"\"\"AP with open mode getting disabled and 
re-enabled\"\"\" hapd =", "}) hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\") for i in range(1, 3):", "Exception(\"MGMT RX wait timed out\") if req['subtype'] == 0: assoc", "bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not", "'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr',", "is used to test stopping of AP side functionality on", "ev is not None: raise Exception(\"Unexpected connection\") dev.request(\"DISCONNECT\") def test_ap_open_sta_enable_disable(dev,", "the interface again subprocess.call(['brctl', 'addbr', br_ifname]) subprocess.call(['brctl', 'setfd', br_ifname, '0'])", "key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" % id) if \"connect@\" not", "dev[1].wait_disconnected() ev = hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if ev is None: raise", "only_new=True) if dev[0].get_bss(bssid) is not None: raise Exception(\"AP was seen", "frame\") ev = dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10) if ev is None: raise", "= hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open2\" }) bssid2 = apdev[1]['bssid'] id1", "except Exception, e: if \"Failed to add\" in str(e): logger.info(\"Ignore", "res or bssid2 in res: raise Exception(\"Unexpected blacklist entry\") hwsim_utils.test_connectivity(dev[0],", "open mode getting disabled and re-enabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "with open mode and select any network\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK", "time.sleep(0.3) # disconnect - with traffic pending - shouldn't cause", "ev = dev.wait_event([\"EXT-RADIO-WORK-START\"]) if ev is None: raise Exception(\"Timeout while", "association event\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "hapd_out_of_mem(hapd, apdev[1], 1, \"eloop_sock_table_add_sock;eloop_register_sock;?eloop_register_read_sock;=i802_init\") # verify that a new interface", "Exception(\"Timeout on AP-STA-DISCONNECTED (2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5) if ev", "'on']) subprocess.check_call(['brctl', 'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\")", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\") for", "raise Exception(\"No connection event received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\")", "bssid2 in res: raise Exception(\"Unexpected blacklist entry\") hwsim_utils.test_connectivity(dev[0], hapd1) dev[0].select_network(id2)", "in range(1, 3): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_iface_alloc\") for i in", "Exception(\"Unexpected connection\") dev[0].select_network(\"any\") dev[0].wait_connected(timeout=10) def test_ap_open_unexpected_assoc_event(dev, apdev): \"\"\"AP with open", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid)", "\"ignore_assoc_probability\": \"0.5\", \"ignore_reassoc_probability\": \"0.5\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) for", "ev = dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10) if ev is None: raise Exception(\"Timeout", "if 
dev[0].get_status_field(\"id_str\") != \"foo\": raise Exception(\"id_str mismatch\") def test_ap_open_select_any(dev, apdev):", "resp['da'] = req['sa'] resp['sa'] = req['da'] resp['bssid'] = req['bssid'] resp['payload']", "assoc=%d\" % assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): \"\"\"AP", "while waiting radio work to start\") id = dev.connect(\"open\", key_mgmt=\"NONE\",", "raise Exception(\"UPDATE_BEACON failed\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected() dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev):", "if ev is None: raise Exception(\"Timeout while waiting radio work", "but run through the channel survey update couple of times", "to no matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', \"2412\",", "radio work to start\") id = dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True)", "} hapd = hostapd.add_ap(apdev[0]['ifname'], params) for i in range(0, 3):", "seen beaconing\") if \"OK\" not in hapd.request(\"RELOAD\"): raise Exception(\"RELOAD failed\")", "scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00/00:00:00:00:00:00\", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception(\"dev[0] connected", "tshark import run_tshark from utils import alloc_fail from wpasupplicant import", "to change network\"\"\" hapd1 = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "raise Exception(\"dev[1] connected to unexpected AP\") if dev[2].get_status_field('bssid') != apdev[0]['bssid']:", "disabled and re-enabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in range(0, 10): req =", "0: assoc += 1 if assoc == 3: break if", "apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" +", "apdev[1]['bssid']: raise Exception(\"dev[0] connected to unexpected AP\") if dev[1].get_status_field('bssid') !=", "raise Exception(\"hostapd interface started even with memory allocation failure: \"", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in range(0, 10): req", "after inactivity related disconnection\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "connected to unexpected AP\") if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2]", "= dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" % id) if", "dev[0].connect(\"unknown\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].select_network(id) ev", "timed out\") if req['subtype'] == 0: assoc += 1 if", "dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "i in range(0, 3): dev[i].wait_connected(timeout=20) def test_ap_open_unknown_action(dev, apdev): \"\"\"AP with", "timeout=5) if ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (2)\")", "\"0.5\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) for i in range(0,", "key_mgmt=\"NONE\", 
scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr() + \" reason=4\") dev[0].wait_disconnected(timeout=5)", "'0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up']) subprocess.call(['iw', ifname, 'set',", "pvb == 0 and state == 1: state = 2", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"start_disabled\": \"1\" }) bssid", "\"open\", \"bss_load_update_period\": \"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") # this does", "params = { \"ssid\": \"open\", \"ignore_probe_probability\": \"0.5\", \"ignore_auth_probability\": \"0.5\", \"ignore_assoc_probability\":", "\"ssid\": \"open\" }) def test_bssid_black_white_list(dev, apdev): \"\"\"BSSID black/white list\"\"\" hapd", "'setfd', br_ifname, '0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up']) subprocess.call(['iw',", "for i in range(1, 5): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd,", "= \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].scan(freq=\"2412\")", "break time.sleep(0.1) if not ok: raise Exception(\"connect radio work not", "0') # let the AP send couple of Beacon frames", "= hapd.own_addr() da = dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {} {}", "}) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr() + \"", "{ \"ssid\": \"open\" }) dev[0].scan(freq=\"2412\") hapd.set(\"ext_mgmt_frame_handling\", \"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_driver_init\")", "\"result=SUCCESS\" not in ev: raise Exception(\"AP did not ack Action", "== 1: state = 2 if state != 2: raise", "mode AP tests # Copyright (c) 2014, Qualcomm Atheros, Inc.", "2, 0) hapd.mgmt_tx(resp) assoc = 0 for i in range(0,", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\")", "if not ok: raise Exception(\"connect radio work not completed\") ev", "ENABLE/DISABLE_NETWORK\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) bssid =", "= dev[0].connect(\"unknown\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].select_network(id)", "}) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This will", "unexpected AP\") if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2] connected to", "+= 1 if assoc == 3: break if assoc !=", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].select_network(id) ev = dev[0].wait_event([\"CTRL-EVENT-NETWORK-NOT-FOUND\", \"CTRL-EVENT-CONNECTED\"], timeout=10)", "hapd) dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([ \"AP-STA-DISCONNECTED\" ], timeout=5) if ev", "\"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\",", "is None: raise Exception(\"No disconnection event received from hostapd\") def", "\" + work_id) ok = False for i in range(30):", "dev.request(\"RADIO_WORK show\"): ok = 
True break time.sleep(0.1) if not ok:", "\"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") # this does not really", "req = hapd.mgmt_rx() if req is None: raise Exception(\"MGMT RX", "for i in range(0, 10): req = hapd.mgmt_rx() if req", "None: raise Exception(\"No connection event received from hostapd\") time.sleep(0.2) hwsim_utils.set_powersave(dev[0],", "on MGMT-TX-STATUS\") if \"result=SUCCESS\" not in ev: raise Exception(\"AP did", "finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'], \"hwsim0.pcapng\"), \"wlan_mgt.tim.partial_virtual_bitmap\", [\"wlan_mgt.tim.partial_virtual_bitmap\"])", "dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\") def test_ap_open_assoc_timeout(dev, apdev): \"\"\"AP timing", "alloc_fail(hapd, count, func): started = False try: hostapd.add_ap(apdev['ifname'], { \"ssid\":", "AP side functionality on client polling. dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link',", "0 and state == 0: state = 1 elif pvb", "from hostapd\") def test_ap_open_packet_loss(dev, apdev): \"\"\"AP with open mode configuration", "\"0.5\", \"ignore_auth_probability\": \"0.5\", \"ignore_assoc_probability\": \"0.5\", \"ignore_reassoc_probability\": \"0.5\" } hapd =", "= req['bssid'] resp['payload'] = struct.pack('<HHH', 0, 2, 0) hapd.mgmt_tx(resp) assoc", "}) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", id_str=\"foo\", wait_connect=False) ev = dev[0].wait_connected(timeout=10) if", "ev = dev[2].wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is not None: raise", "was seen beaconing\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON", "out\") if req['subtype'] == 0: assoc += 1 if assoc", "matching id_str: \" + ev) if dev[0].get_status_field(\"id_str\") != \"foo\": raise", "SELECT_NETWORK to change network\"\"\" hapd1 = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "adding an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface addition succeeded", "Exception(\"RELOAD failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev):", "from utils import alloc_fail from wpasupplicant import WpaSupplicant def test_ap_open(dev,", "\"1\" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid)", "\"ssid\": \"open\" }) hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" })", "frames time.sleep(0.3) # disconnect - with traffic pending - shouldn't", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) bssid1 = apdev[0]['bssid'] hapd2 =", "count, func): started = False try: hostapd.add_ap(apdev['ifname'], { \"ssid\": \"open\"", "still be added when memory allocation does # not fail", "hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0') # let the", "if ev is None or \"result=SUCCESS\" not in ev: raise", "Action frame\") def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev): \"\"\"Reconnect to open mode AP", "raise Exception(\"dev[0] connected to unexpected AP\") if dev[1].get_status_field('bssid') != apdev[0]['bssid']:", "br_ifname]) subprocess.call(['brctl', 'setfd', br_ifname, '0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname,", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) id = 
dev[0].connect(\"unknown\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "mismatch\") def test_ap_open_select_any(dev, apdev): \"\"\"AP with open mode and select", "select any network\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "ev is None or \"result=SUCCESS\" not in ev: raise Exception(\"AP", "AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "used to test stopping of AP side functionality on client", "{ \"ssid\": \"open\" }) br_ifname='sta-br0' ifname='wlan5' wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') #", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\")", "hapd.mgmt_rx() if req is None: raise Exception(\"MGMT RX wait timed", "{} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0') # let the AP", "ev is not None: raise Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\")", "raise Exception(\"Timeout on AP-STA-DISCONNECTED (2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5) if", "l in out.splitlines(): pvb = int(l, 16) if pvb >", "+ dev[0].p2p_interface_addr() + \" reason=4\") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\")", "subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up']) subprocess.call(['iw', ifname, 'set', '4addr',", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) hapd_out_of_mem(hapd, apdev[1], 1,", "apdev[1], 1, \"hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_driver_init\") for i in", "apdev[1], i, \"hostapd_iface_alloc\") for i in range(1, 5): hapd_out_of_mem(hapd, apdev[1],", "open mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "{} freq=2412 action=765432\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could", "scan_freq=\"2412\") bssid = apdev[0]['bssid'] cmd = \"MGMT_TX {} {} freq=2412", "= logging.getLogger() import struct import subprocess import time import os", "apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not None: raise", "\"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): \"\"\"AP with open mode and", "WpaSupplicant def test_ap_open(dev, apdev): \"\"\"AP with open mode (no security)", "open mode and unexpected association event\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "raise Exception(\"connect radio work missing\") dev.request(\"DISABLE_NETWORK %d\" % id) dev.request(\"RADIO_WORK", "key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=apdev[1]['bssid']) dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00/00:00:00:00:00:00\", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid')", "= False for i in range(30): if \"connect@\" not in", "interface can still be added when memory allocation does #", "bridge\"\"\" br_ifname='sta-br0' ifname='wlan5' try: _test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip', 'link', 'set',", "time.sleep(0.1) if not ok: raise Exception(\"connect radio work not completed\")", "= hapd.wait_event([ \"AP-STA-DISCONNECTED\" ], timeout=5) if ev is None: raise", "ev = dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10) if ev is None or 
\"result=SUCCESS\"", "tests beacon loss detection in mac80211 on dev0. # dev1", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].scan(freq=\"2412\") hapd.set(\"ext_mgmt_frame_handling\", \"1\")", "in mac80211 on dev0. # dev1 is used to test", "\"\"\"AP with open mode and id_str\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "= True break time.sleep(0.1) if not ok: raise Exception(\"connect radio", "apdev[0]['bssid'] cmd = \"MGMT_TX {} {} freq=2412 action=1100\".format(bssid, bssid) if", "dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev): \"\"\"AP with open mode (no security)", "raise Exception(\"No INTERFACE-ENABLED event\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def test_ap_open_disconnect_in_ps(dev, apdev,", "hapd.request('DATA_TEST_CONFIG 0') # let the AP send couple of Beacon", "== 0 and state == 1: state = 2 if", "hapd1) dev[0].select_network(id2) dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd2) res = dev[0].request(\"BLACKLIST\") if bssid1", "freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with open", "# Copyright (c) 2014, Qualcomm Atheros, Inc. # # This", "\"ssid\": \"open\" }) started = True except: pass if started:", "'dev', br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname])", "bssid2 = apdev[1]['bssid'] id1 = dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) id2", "hwsim_utils.test_connectivity(dev[0], hapd2) res = dev[0].request(\"BLACKLIST\") if bssid1 in res or", "hostapd.add_ap(apdev[0]['ifname'], params) bssid = apdev[0]['bssid'] dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[1].connect(\"open\", key_mgmt=\"NONE\",", "apdev[0]['bssid'] cmd = \"MGMT_TX {} {} freq=2412 action=765432\".format(bssid, bssid) if", "\"open\" }) dev[0].scan(freq=\"2412\") hapd.set(\"ext_mgmt_frame_handling\", \"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for", "not in ev: raise Exception(\"AP did not ack Action frame\")", "bridge interface and add the interface again subprocess.call(['brctl', 'addbr', br_ifname])", "\" reason=4\") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\") def test_ap_open_assoc_timeout(dev, apdev):", "raise Exception(\"Could not send test Action frame\") ev = dev[0].wait_event([\"MGMT-TX-STATUS\"],", "only_add_network=True) id2 = dev[0].connect(\"open2\", key_mgmt=\"NONE\", scan_freq=\"2412\") hwsim_utils.test_connectivity(dev[0], hapd2) dev[0].select_network(id1) dev[0].wait_connected()", "mode connection and SELECT_NETWORK to change network\"\"\" hapd1 = hostapd.add_ap(apdev[0]['ifname'],", "= dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) id2 = dev[0].connect(\"open2\", key_mgmt=\"NONE\", scan_freq=\"2412\")", "result in disconnection due to no matching network subprocess.call(['iw', 'dev',", "# verify that a new interface can still be added", "= \"MGMT_TX {} {} freq=2412 action=1100\".format(bssid, bssid) if \"FAIL\" in", "missing\") dev.request(\"DISABLE_NETWORK %d\" % id) dev.request(\"RADIO_WORK done \" + work_id)", "hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=apdev[1]['bssid']) dev[1].connect(\"open\",", "AP\") if 
dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2] connected to unexpected", "test_ap_open_unknown_action(dev, apdev): \"\"\"AP with open mode configuration and unknown Action", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE", "i, \"hostapd_iface_alloc\") for i in range(1, 5): hapd_out_of_mem(hapd, apdev[1], i,", "dev.scan_for_bss(bssid, freq=2412) work_id = dev.request(\"RADIO_WORK add block-work\") ev = dev.wait_event([\"EXT-RADIO-WORK-START\"])", "configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\",", "dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0')", "dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def test_ap_open_wpas_in_bridge(dev, apdev): \"\"\"Open mode AP", "not ok: raise Exception(\"connect radio work not completed\") ev =", "raise Exception(\"AP was seen beaconing\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"):", "and re-enabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "\"\"\"AP with open mode (no security) configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "\"open2\" }) bssid2 = apdev[1]['bssid'] id1 = dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"start_disabled\": \"1\" }) bssid = apdev[0]['bssid']", "dev[0].select_network(id) ev = dev[0].wait_event([\"CTRL-EVENT-NETWORK-NOT-FOUND\", \"CTRL-EVENT-CONNECTED\"], timeout=10) if ev is None:", "configuration and invalid WMM Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set',", "add\" in str(e): logger.info(\"Ignore expected interface_add failure due to missing", "+ work_id) ok = False for i in range(30): if", "Exception(\"AP was seen beaconing\") if \"OK\" not in hapd.request(\"RELOAD\"): raise", "open mode configuration and invalid WMM Action frame\"\"\" hapd =", "raise Exception(\"AP was seen beaconing\") if \"OK\" not in hapd.request(\"RELOAD\"):", "key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) id2 = dev[0].connect(\"open2\", key_mgmt=\"NONE\", scan_freq=\"2412\") hwsim_utils.test_connectivity(dev[0], hapd2)", "if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected AP\")", "scan_freq=\"2412\", id_str=\"foo\", wait_connect=False) ev = dev[0].wait_connected(timeout=10) if \"id_str=foo\" not in", "Action frame\") def test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP with open mode configuration", "\"open\", \"start_disabled\": \"1\" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True)", "ev = hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if ev is None: raise Exception(\"No", "hwsim_utils.test_connectivity(dev[0], hapd2) dev[0].select_network(id1) dev[0].wait_connected() res = dev[0].request(\"BLACKLIST\") if bssid1 in", "apdev, params): \"\"\"Disconnect with the client in PS to regression-test", "Exception(\"dev[2] connected to unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") dev[2].request(\"REMOVE_NETWORK", "= 
def test_ap_open_packet_loss(dev, apdev):
    """AP with open mode configuration and large packet loss"""
    params = { "ssid": "open",
               "ignore_probe_probability": "0.5",
               "ignore_auth_probability": "0.5",
               "ignore_assoc_probability": "0.5",
               "ignore_reassoc_probability": "0.5" }
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)
    for i in range(0, 3):
        dev[i].connect("open", key_mgmt="NONE", scan_freq="2412",
                       wait_connect=False)
    for i in range(0, 3):
        dev[i].wait_connected(timeout=20)

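# A minimal sketch of running individual cases, assuming the standard hwsim
# runner from the hostap tree (executed in tests/hwsim):
#   ./run-tests.py ap_open ap_open_packet_loss
# Test names are the function names without the "test_" prefix.
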
def test_ap_open_unknown_action(dev, apdev):
    """AP with open mode configuration and unknown Action frame"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    bssid = apdev[0]['bssid']
    cmd = "MGMT_TX {} {} freq=2412 action=765432".format(bssid, bssid)
    if "FAIL" in dev[0].request(cmd):
        raise Exception("Could not send test Action frame")
    ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10)
    if ev is None:
        raise Exception("Timeout on MGMT-TX-STATUS")
    if "result=SUCCESS" not in ev:
        raise Exception("AP did not ack Action frame")

def test_ap_open_invalid_wmm_action(dev, apdev):
    """AP with open mode configuration and invalid WMM Action frame"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    bssid = apdev[0]['bssid']
    cmd = "MGMT_TX {} {} freq=2412 action=1100".format(bssid, bssid)
    if "FAIL" in dev[0].request(cmd):
        raise Exception("Could not send test Action frame")
    ev = dev[0].wait_event(["MGMT-TX-STATUS"], timeout=10)
    if ev is None or "result=SUCCESS" not in ev:
        raise Exception("AP did not ack Action frame")

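# Note on the two MGMT_TX commands above: the action= argument carries the
# raw Action frame body as a hexdump. "765432" starts with category 0x76,
# which is not an assigned category, and "1100" is category 0x11 (17, WMM
# in hostapd's definitions) with a truncated body. In both cases the AP is
# expected to ack the frame at the MAC layer (result=SUCCESS in
# MGMT-TX-STATUS) even though the payload itself is ignored or rejected.
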
def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev):
    """Reconnect to open mode AP after inactivity related disconnection"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    # reason 4 = Disassociated due to inactivity
    hapd.request("DEAUTHENTICATE " + dev[0].p2p_interface_addr() + " reason=4")
    dev[0].wait_disconnected(timeout=5)
    dev[0].wait_connected(timeout=2, error="Timeout on reconnection")

def test_ap_open_assoc_timeout(dev, apdev):
    """AP timing out association"""
    ssid = "test"
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].scan(freq="2412")
    hapd.set("ext_mgmt_frame_handling", "1")
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   wait_connect=False)
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 11:  # Authentication
            break
        req = None
    if not req:
        raise Exception("Authentication frame not received")

    # Build an Authentication response: algorithm 0 (open system),
    # transaction sequence 2, status code 0 (success)
    resp = {}
    resp['fc'] = req['fc']
    resp['da'] = req['sa']
    resp['sa'] = req['da']
    resp['bssid'] = req['bssid']
    resp['payload'] = struct.pack('<HHH', 0, 2, 0)
    hapd.mgmt_tx(resp)

    # Ignore the Association Request frames and wait for the station to
    # retransmit them before returning control to hostapd.
    assoc = 0
    for i in range(0, 10):
        req = hapd.mgmt_rx()
        if req is None:
            raise Exception("MGMT RX wait timed out")
        if req['subtype'] == 0:  # Association Request
            assoc += 1
            if assoc == 3:
                break
    if assoc != 3:
        raise Exception("Association Request frames not received: assoc=%d" % assoc)
    hapd.set("ext_mgmt_frame_handling", "0")
    dev[0].wait_connected(timeout=15)

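# With ext_mgmt_frame_handling enabled, hostapd forwards received
# management frames to the control interface instead of processing them,
# so the test above can act as the MLME: mgmt_rx() returns a parsed frame
# (including the 'fc', 'da', 'sa', 'bssid' and 'subtype' fields used here)
# and mgmt_tx() injects a raw response frame.
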
def test_ap_open_id_str(dev, apdev):
    """AP with open mode and id_str"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412", id_str="foo",
                   wait_connect=False)
    ev = dev[0].wait_connected(timeout=10)
    if "id_str=foo" not in ev:
        raise Exception("CTRL-EVENT-CONNECT did not have matching id_str: " + ev)
    if dev[0].get_status_field("id_str") != "foo":
        raise Exception("id_str mismatch")

def test_ap_open_select_any(dev, apdev):
    """AP with open mode and select any network"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    id = dev[0].connect("unknown", key_mgmt="NONE", scan_freq="2412",
                        only_add_network=True)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   only_add_network=True)
    dev[0].select_network(id)
    ev = dev[0].wait_event(["CTRL-EVENT-NETWORK-NOT-FOUND",
                            "CTRL-EVENT-CONNECTED"], timeout=10)
    if ev is None:
        raise Exception("No result reported")
    if "CTRL-EVENT-CONNECTED" in ev:
        raise Exception("Unexpected connection")

    dev[0].select_network("any")
    dev[0].wait_connected(timeout=10)

def test_ap_open_unexpected_assoc_event(dev, apdev):
    """AP with open mode and unexpected association event"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected(timeout=15)
    dev[0].dump_monitor()
    # This will be accepted due to matching network
    subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412",
                     apdev[0]['bssid']])
    dev[0].wait_connected(timeout=15)
    dev[0].dump_monitor()

    dev[0].request("REMOVE_NETWORK all")
    dev[0].wait_disconnected(timeout=5)
    dev[0].dump_monitor()
    # This will result in disconnection due to no matching network
    subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', "2412",
                     apdev[0]['bssid']])
    dev[0].wait_disconnected(timeout=15)

def test_ap_bss_load(dev, apdev):
    """AP with open mode (no security) configuration"""
    hapd = hostapd.add_ap(apdev[0]['ifname'],
                          { "ssid": "open",
                            "bss_load_update_period": "10" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    # this does not really get much useful output with mac80211_hwsim
    # currently, but run through the channel survey update a couple of times
    for i in range(0, 10):
        hwsim_utils.test_connectivity(dev[0], hapd)
        hwsim_utils.test_connectivity(dev[0], hapd)
        hwsim_utils.test_connectivity(dev[0], hapd)
        time.sleep(0.15)

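# bss_load_update_period enables the BSS Load element in Beacon and Probe
# Response frames (station count, channel utilization, available admission
# capacity) and sets how often it is refreshed from the driver's channel
# survey. The loop above mainly exercises that survey path, since
# mac80211_hwsim does not report meaningful utilization data.
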
dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set', 'dev',", "\"2412\", apdev[0]['bssid']]) dev[0].wait_connected(timeout=15) dev[0].dump_monitor() dev[0].request(\"REMOVE_NETWORK all\") dev[0].wait_disconnected(timeout=5) dev[0].dump_monitor() # This", "req = None if not req: raise Exception(\"Authentication frame not", "= dev[0].request(\"BLACKLIST\") if bssid1 in res or bssid2 in res:", "show\"): raise Exception(\"connect radio work missing\") dev.request(\"DISABLE_NETWORK %d\" % id)", "def test_ap_open_out_of_memory(dev, apdev): \"\"\"hostapd failing to setup interface due to", "3): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_iface_alloc\") for i in range(1, 5):", "ev is None: raise Exception(\"No disconnection event received from hostapd\")", "raise Exception(\"Timeout while waiting radio work to start\") id =", "open mode and beaconing disabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "subprocess.call(['brctl', 'setfd', br_ifname, '0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up'])", "to unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") ev = dev[2].wait_event([\"CTRL-EVENT-CONNECTED\"],", "Exception(\"connect radio work not completed\") ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if", "sta_enable_disable(dev, bssid): dev.scan_for_bss(bssid, freq=2412) work_id = dev.request(\"RADIO_WORK add block-work\") ev", "AP after inactivity related disconnection\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "in range(0, 10): hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15)", "_test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) br_ifname='sta-br0'", "}) bssid1 = apdev[0]['bssid'] hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open2\"", "seen beaconing\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\")", "dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'], \"hwsim0.pcapng\"), \"wlan_mgt.tim.partial_virtual_bitmap\",", "Exception(\"Timeout on MGMT-TX-STATUS\") if \"result=SUCCESS\" not in ev: raise Exception(\"AP", "couple of Beacon frames time.sleep(0.3) # disconnect - with traffic", "dev[0].get_status_field(\"id_str\") != \"foo\": raise Exception(\"id_str mismatch\") def test_ap_open_select_any(dev, apdev): \"\"\"AP", "raise Exception(\"Interface addition succeeded unexpectedly\") except Exception, e: if \"Failed", "int(l, 16) if pvb > 0 and state == 0:", "'link', 'set', 'dev', br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl',", "time import os import hostapd import hwsim_utils from tshark import", "assoc += 1 if assoc == 3: break if assoc", "struct.pack('<HHH', 0, 2, 0) hapd.mgmt_tx(resp) assoc = 0 for i", "raise Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def test_ap_open_wpas_in_bridge(dev, apdev): \"\"\"Open", "+ apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \"", "'dev', br_ifname, 'up']) subprocess.call(['iw', ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif',", "dev[0].wait_disconnected() 
dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): \"\"\"AP with open mode", "all\") dev[1].request(\"REMOVE_NETWORK all\") ev = dev[2].wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is", "\"\"\"AP with open mode and unexpected association event\"\"\" hapd =", "interface_add failure due to missing bridge interface: \" + str(e))", "Beacon frames time.sleep(0.3) # disconnect - with traffic pending -", "ev is None: raise Exception(\"No connection event received from hostapd\")", "test_ap_bss_load(dev, apdev): \"\"\"AP with open mode (no security) configuration\"\"\" hapd", "ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=10) if ev is None: raise Exception(\"Timeout", "apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev = hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if ev", "bssid = apdev[0]['bssid'] dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") subprocess.call(['ip',", "if \"Failed to add\" in str(e): logger.info(\"Ignore expected interface_add failure", "{ \"ssid\": \"open\" }) hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\"", "memory allocation failure: \" + arg) def test_ap_open_out_of_memory(dev, apdev): \"\"\"hostapd", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "2014, Qualcomm Atheros, Inc. # # This software may be", "None: state = 0 for l in out.splitlines(): pvb =", "key_mgmt=\"NONE\", scan_freq=\"2412\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down']) ev =", "scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid']", "\"\"\"Disconnect with the client in PS to regression-test a kernel", "configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"bss_load_update_period\": \"10\" })", "if \"connect@\" not in dev.request(\"RADIO_WORK show\"): raise Exception(\"connect radio work", "raise Exception(\"Unexpected connection\") dev[0].select_network(\"any\") dev[0].wait_connected(timeout=10) def test_ap_open_unexpected_assoc_event(dev, apdev): \"\"\"AP with", "if bssid1 in res or bssid2 in res: raise Exception(\"Unexpected", "getting set and unset (state=%d)\" % state) def test_ap_open_select_network(dev, apdev):", "if out is not None: state = 0 for l", "stopping of AP side functionality on client polling. 
dev[1].request(\"REMOVE_NETWORK all\")", "bssid) if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could not send test", "dev[0].select_network(id1) dev[0].wait_connected() res = dev[0].request(\"BLACKLIST\") if bssid1 in res or", "dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2] connected to unexpected AP\") dev[0].request(\"REMOVE_NETWORK", "Exception(\"No connection event received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev", "apdev): \"\"\"AP timing out association\"\"\" ssid = \"test\" hapd =", "finally: subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname,", "subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev,", "hostapd\") def test_ap_open_packet_loss(dev, apdev): \"\"\"AP with open mode configuration and", "unset (state=%d)\" % state) def test_ap_open_select_network(dev, apdev): \"\"\"Open mode connection", "in dev.request(\"RADIO_WORK show\"): ok = True break time.sleep(0.1) if not", "with open mode (no security) configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "to add\" in str(e): logger.info(\"Ignore expected interface_add failure due to", "\"AP-STA-DISCONNECTED\" ], timeout=5) if ev is None: raise Exception(\"No disconnection", "connection event received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev =", "is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"],", "apdev): \"\"\"AP with open mode configuration and large packet loss\"\"\"", "= apdev[1]['bssid'] id1 = dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) id2 =", "open mode and id_str\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2] connected to unexpected AP\")", "that a new interface can still be added when memory", "Exception(\"AP did not ack Action frame\") def test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP", "subprocess.call(['iw', ifname, 'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd =", "}) id = dev[0].connect(\"unknown\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "{ \"ssid\": \"open\", \"start_disabled\": \"1\" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache()", "apdev): \"\"\"Open mode connection and SELECT_NETWORK to change network\"\"\" hapd1", "key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with open mode and", "{ \"ssid\": \"open\", \"ignore_probe_probability\": \"0.5\", \"ignore_auth_probability\": \"0.5\", \"ignore_assoc_probability\": \"0.5\", \"ignore_reassoc_probability\":", "'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev): \"\"\"AP with open", "through the channel survey update couple of times for i", "was seen beaconing\") if \"OK\" not in hapd.request(\"RELOAD\"): raise Exception(\"RELOAD", "mac80211_hwsim currently, # but run through the channel survey update", "\"\"\"hostapd failing to setup interface due to allocation failure\"\"\" hapd", "bssid_whitelist=\"00:00:00:00:00:00/00:00:00:00:00:00\", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') 
def test_ap_open_wpas_in_bridge(dev, apdev):
    """Open mode AP and wpas interface in a bridge"""
    br_ifname='sta-br0'
    ifname='wlan5'
    try:
        _test_ap_open_wpas_in_bridge(dev, apdev)
    finally:
        subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down'])
        subprocess.call(['brctl', 'delif', br_ifname, ifname])
        subprocess.call(['brctl', 'delbr', br_ifname])
        subprocess.call(['iw', ifname, 'set', '4addr', 'off'])

entry\") hwsim_utils.test_connectivity(dev[0], hapd1)", "dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected()", "unknown Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "dev[0].wait_connected() res = dev[0].request(\"BLACKLIST\") if bssid1 in res or bssid2", "and beaconing disabled (2)\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\",", "disconnect - with traffic pending - shouldn't cause kernel warnings", "{ \"ssid\": \"open\" }) bssid1 = apdev[0]['bssid'] hapd2 = hostapd.add_ap(apdev[1]['ifname'],", "\"\"\"AP with open mode and beaconing disabled (2)\"\"\" hapd =", "def test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP with open mode configuration and invalid", "ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (1)\") ev =", "subprocess.check_call(['brctl', 'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def", "range(30): if \"connect@\" not in dev.request(\"RADIO_WORK show\"): ok = True", "apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) br_ifname='sta-br0' ifname='wlan5'", "wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface addition succeeded unexpectedly\") except Exception, e:", "from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([ \"AP-STA-DISCONNECTED\" ],", "update couple of times for i in range(0, 10): hwsim_utils.test_connectivity(dev[0],", "hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([ \"AP-STA-DISCONNECTED\" ], timeout=5)", "ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (2)\") ev =", "in a bridge\"\"\" br_ifname='sta-br0' ifname='wlan5' try: _test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip',", "apdev): \"\"\"Open mode AP and wpas interface in a bridge\"\"\"", "(2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5) if ev is None: raise", "\"ap_max_inactivity\": \"1\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid = apdev[0]['bssid']", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) br_ifname='sta-br0' ifname='wlan5' wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')", "\"\"\"AP with open mode and beaconing disabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "= req['da'] resp['bssid'] = req['bssid'] resp['payload'] = struct.pack('<HHH', 0, 2,", "5): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_config_alloc\") hapd_out_of_mem(hapd,", "Inc. 
def test_ap_open_start_disabled(dev, apdev):
    """AP with open mode and beaconing disabled"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open",
                                                "start_disabled": "1" })
    bssid = apdev[0]['bssid']

    dev[0].flush_scan_cache()
    dev[0].scan(freq=2412, only_new=True)
    if dev[0].get_bss(bssid) is not None:
        raise Exception("AP was seen beaconing")
    if "OK" not in hapd.request("RELOAD"):
        raise Exception("RELOAD failed")
    dev[0].scan_for_bss(bssid, freq=2412)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")

def test_ap_open_start_disabled2(dev, apdev):
    """AP with open mode and beaconing disabled (2)"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open",
                                                "start_disabled": "1" })
    bssid = apdev[0]['bssid']

    dev[0].flush_scan_cache()
    dev[0].scan(freq=2412, only_new=True)
    if dev[0].get_bss(bssid) is not None:
        raise Exception("AP was seen beaconing")
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")
    dev[0].scan_for_bss(bssid, freq=2412)
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    if "OK" not in hapd.request("UPDATE_BEACON"):
        raise Exception("UPDATE_BEACON failed")
    dev[0].request("DISCONNECT")
    dev[0].wait_disconnected()
    dev[0].request("RECONNECT")
    dev[0].wait_connected()

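# With start_disabled=1 hostapd brings the interface up without installing
# a Beacon frame. Both variants above then start beaconing from the control
# interface: RELOAD re-reads the configuration and enables the BSS, while
# UPDATE_BEACON only (re)builds and installs the Beacon, which is why it
# can also be repeated after a station has connected.
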
def test_ap_open_ifdown(dev, apdev):
    """AP with open mode and external ifconfig down"""
    params = { "ssid": "open",
               "ap_max_inactivity": "1" }
    hapd = hostapd.add_ap(apdev[0]['ifname'], params)
    bssid = apdev[0]['bssid']

    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
    dev[1].connect("open", key_mgmt="NONE", scan_freq="2412")
    subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down'])
    ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=10)
    if ev is None:
        raise Exception("Timeout on AP-STA-DISCONNECTED (1)")
    ev = hapd.wait_event(["AP-STA-DISCONNECTED"], timeout=5)
    if ev is None:
        raise Exception("Timeout on AP-STA-DISCONNECTED (2)")
    ev = hapd.wait_event(["INTERFACE-DISABLED"], timeout=5)
    if ev is None:
        raise Exception("No INTERFACE-DISABLED event")
    # The following wait tests beacon loss detection in mac80211 on dev0.
    # dev1 is used to test stopping of AP side functionality on client
    # polling.
    dev[1].request("REMOVE_NETWORK all")
    subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up'])
    dev[0].wait_disconnected()
    dev[1].wait_disconnected()
    ev = hapd.wait_event(["INTERFACE-ENABLED"], timeout=10)
    if ev is None:
        raise Exception("No INTERFACE-ENABLED event")
    dev[0].wait_connected()
    hwsim_utils.test_connectivity(dev[0], hapd)

def test_ap_open_disconnect_in_ps(dev, apdev, params):
    """Disconnect with the client in PS to regression-test a kernel bug"""
    hapd = hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412",
                   bg_scan_period="0")
    ev = hapd.wait_event([ "AP-STA-CONNECTED" ], timeout=5)
    if ev is None:
        raise Exception("No connection event received from hostapd")

    time.sleep(0.2)
    hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_MANUAL_POLL)
    try:
        # inject some traffic
        sa = hapd.own_addr()
        da = dev[0].own_addr()
        hapd.request('DATA_TEST_CONFIG 1')
        hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa))
        hapd.request('DATA_TEST_CONFIG 0')

        # let the AP send a couple of Beacon frames
        time.sleep(0.3)

        # disconnect - with traffic pending - shouldn't cause kernel warnings
        dev[0].request("DISCONNECT")
    finally:
        hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED)

    time.sleep(0.2)
    out = run_tshark(os.path.join(params['logdir'], "hwsim0.pcapng"),
                     "wlan_mgt.tim.partial_virtual_bitmap",
                     ["wlan_mgt.tim.partial_virtual_bitmap"])
    if out is not None:
        # Verify from the capture that the TIM bit for the station was
        # first set (frames buffered for it) and then cleared again.
        state = 0
        for l in out.splitlines():
            pvb = int(l, 16)
            if pvb > 0 and state == 0:
                state = 1
            elif pvb == 0 and state == 1:
                state = 2
        if state != 2:
            raise Exception("Didn't observe TIM bit getting set and unset (state=%d)" % state)

[\"wlan_mgt.tim.partial_virtual_bitmap\"]) if out is not None: state = 0", "= False try: hostapd.add_ap(apdev['ifname'], { \"ssid\": \"open\" }) started =", "AP-STA-DISCONNECTED (2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5) if ev is None:", "count, func): with alloc_fail(hapd, count, func): started = False try:", "hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if ev is None: raise Exception(\"No INTERFACE-ENABLED event\")", "dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([ \"AP-STA-DISCONNECTED\" ], timeout=5) if ev is", "\"hwsim0.pcapng\"), \"wlan_mgt.tim.partial_virtual_bitmap\", [\"wlan_mgt.tim.partial_virtual_bitmap\"]) if out is not None: state =", "This software may be distributed under the terms of the", "ifname, 'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'],", "br_ifname=br_ifname) raise Exception(\"Interface addition succeeded unexpectedly\") except Exception, e: if", "hapd) hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15) def hapd_out_of_mem(hapd, apdev, count,", "related disconnection\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "= req['sa'] resp['sa'] = req['da'] resp['bssid'] = req['bssid'] resp['payload'] =", "_test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down']) subprocess.call(['brctl',", "= apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not None:", "bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] +", "= None if not req: raise Exception(\"Authentication frame not received\")", "Exception(\"No INTERFACE-DISABLED event\") # The following wait tests beacon loss", "frame\") def test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev): \"\"\"Reconnect to open mode AP after", "apdev[1], 1, \"eloop_sock_table_add_sock;eloop_register_sock;?eloop_register_read_sock;=i802_init\") # verify that a new interface can", "scan_freq=\"2412\") # this does not really get much useful output", "'4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "try: hostapd.add_ap(apdev['ifname'], { \"ssid\": \"open\" }) started = True except:", "def test_ap_open_disconnect_in_ps(dev, apdev, params): \"\"\"Disconnect with the client in PS", "reason=4\") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\") def test_ap_open_assoc_timeout(dev, apdev): \"\"\"AP", "arg) def test_ap_open_out_of_memory(dev, apdev): \"\"\"hostapd failing to setup interface due", "AP\") if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected", "= hapd.wait_event([ \"AP-STA-CONNECTED\" ], timeout=5) if ev is None: raise", "really get much useful output with mac80211_hwsim currently, # but", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=apdev[1]['bssid']) dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=apdev[1]['bssid']) dev[2].connect(\"open\", key_mgmt=\"NONE\",", "for i in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, \"=wpa_driver_nl80211_drv_init\") #", "apdev[0]['bssid']: raise Exception(\"dev[2] connected to unexpected 
AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK", "be added when memory allocation does # not fail hostapd.add_ap(apdev[1]['ifname'],", "regression-test a kernel bug\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "\" + ev) if dev[0].get_status_field(\"id_str\") != \"foo\": raise Exception(\"id_str mismatch\")", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") bssid = apdev[0]['bssid'] cmd = \"MGMT_TX {}", "\"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr() +", "to matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']])", "ifconfig down\"\"\" params = { \"ssid\": \"open\", \"ap_max_inactivity\": \"1\" }", "apdev[0]['bssid'] dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") subprocess.call(['ip', 'link', 'set',", "dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected AP\") if", "None: raise Exception(\"Timeout while waiting radio work to start\") id", "out is not None: state = 0 for l in", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor()", "dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\")", "freq=2412 action=1100\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could not", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) id = dev[0].connect(\"unknown\", key_mgmt=\"NONE\",", "None: raise Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def test_ap_open_wpas_in_bridge(dev, apdev):", "of adding an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface addition", "test_bssid_black_white_list(dev, apdev): \"\"\"BSSID black/white list\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "None if not req: raise Exception(\"Authentication frame not received\") resp", "}) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is", "test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP with open mode configuration and invalid WMM", "with open mode and id_str\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "apdev): \"\"\"AP with open mode and external ifconfig down\"\"\" params", "received from hostapd\") time.sleep(0.2) hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_MANUAL_POLL) try: # inject some", "timeout=10) if ev is None: raise Exception(\"Timeout on MGMT-TX-STATUS\") if", "ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is not None: raise", "(2)\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"start_disabled\": \"1\" })", "req is None: raise Exception(\"MGMT RX wait timed out\") if", "apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception(\"dev[0]", "if ev is None: raise Exception(\"No result reported\") if \"CTRL-EVENT-CONNECTED\"", "may be distributed under the terms of the BSD license.", "\"hostapd_driver_init\") for i in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, \"=wpa_driver_nl80211_drv_init\")", "a failure case 
    # of adding an interface
    try:
        wpas.interface_add(ifname, br_ifname=br_ifname)
                       bssid)
    wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
    wpas.interface_add("wlan5", drv_params="force_connect_cmd=1")
    sta_enable_disable(wpas, bssid)
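    # The second sta_enable_disable() pass above runs on a separate
    # wpa_supplicant instance with force_connect_cmd=1, i.e., using the
    # driver-based connect command instead of the default
    # authenticate/associate path, so both SME variants get exercised.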
#", "with open mode configuration and large packet loss\"\"\" params =", "dev[i].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in range(0, 3): dev[i].wait_connected(timeout=20)", "interface again subprocess.call(['brctl', 'addbr', br_ifname]) subprocess.call(['brctl', 'setfd', br_ifname, '0']) subprocess.call(['ip',", "state = 2 if state != 2: raise Exception(\"Didn't observe", "traffic pending - shouldn't cause kernel warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0],", "AP tests # Copyright (c) 2014, Qualcomm Atheros, Inc. #", "to unexpected AP\") if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[2] connected", "0: state = 1 elif pvb == 0 and state", "hapd.wait_event([ \"AP-STA-CONNECTED\" ], timeout=5) if ev is None: raise Exception(\"No", "if \"connect@\" not in dev.request(\"RADIO_WORK show\"): ok = True break", "in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected() dev[0].request(\"RECONNECT\") dev[0].wait_connected() def", "br_ifname, 'down']) subprocess.call(['brctl', 'delif', br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw',", "ev: raise Exception(\"CTRL-EVENT-CONNECT did not have matching id_str: \" +", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=apdev[1]['bssid']) dev[1].connect(\"open\", key_mgmt=\"NONE\",", "alloc_fail from wpasupplicant import WpaSupplicant def test_ap_open(dev, apdev): \"\"\"AP with", "did not ack Action frame\") def test_ap_open_invalid_wmm_action(dev, apdev): \"\"\"AP with", "key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=apdev[1]['bssid']) dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=apdev[1]['bssid']) dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "\"MGMT_TX {} {} freq=2412 action=1100\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd):", "if ev is None: raise Exception(\"No INTERFACE-DISABLED event\") # The", "apdev): \"\"\"AP with open mode configuration and invalid WMM Action", "if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected()", "Exception(\"Could not send test Action frame\") ev = dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10)", "key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev[0].select_network(id) ev =", "wait timed out\") if req['subtype'] == 11: break req =", "failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") if \"OK\" not in", "e: if \"Failed to add\" in str(e): logger.info(\"Ignore expected interface_add", "def test_ap_open_id_str(dev, apdev): \"\"\"AP with open mode and id_str\"\"\" hapd", "and unset (state=%d)\" % state) def test_ap_open_select_network(dev, apdev): \"\"\"Open mode", "dev0. 
    # dev1 is used to test stopping of AP side functionality on client polling.
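    # Once the AP interface is down, beacons stop; mac80211 should
    # detect the beacon loss, probe the AP, and report disconnection
    # when no response arrives.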
hostapd.add_ap(apdev[0]['ifname'], { "ssid": "open",
                                     "bss_load_update_period": "10" })
    dev[0].connect("open", key_mgmt="NONE", scan_freq="2412")
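    # bss_load_update_period makes hostapd advertise a BSS Load element
    # in Beacon frames and refresh it from driver channel survey data.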
#
#
dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_blacklist=apdev[1]['bssid']) dev[2].connect(\"open\",", "dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00/00:00:00:00:00:00\", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise", "if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].scan_for_bss(bssid, freq=2412)", "hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid = apdev[0]['bssid'] dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\")", "this does not really get much useful output with mac80211_hwsim", "i in range(1, 5): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1],", "\"ssid\": \"open\", \"bss_load_update_period\": \"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") # this", "security) configuration\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "}) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") bssid = apdev[0]['bssid'] cmd = \"MGMT_TX", "with open mode getting disabled and re-enabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "(c) 2014, Qualcomm Atheros, Inc. # # This software may", "ev is None: raise Exception(\"No INTERFACE-ENABLED event\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd)", "due to no matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect', 'open',", "all\") dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\",", "getting disabled and re-enabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "# This will be accepted due to matching network subprocess.call(['iw',", "= apdev[0]['bssid'] cmd = \"MGMT_TX {} {} freq=2412 action=765432\".format(bssid, bssid)", "\"CTRL-EVENT-CONNECTED\"], timeout=10) if ev is None: raise Exception(\"No result reported\")", "import struct import subprocess import time import os import hostapd", "result reported\") if \"CTRL-EVENT-CONNECTED\" in ev: raise Exception(\"Unexpected connection\") dev[0].select_network(\"any\")", "br_ifname='sta-br0' ifname='wlan5' try: _test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip', 'link', 'set', 'dev',", "failed\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected() dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): \"\"\"AP with", "hapd) time.sleep(0.15) def hapd_out_of_mem(hapd, apdev, count, func): with alloc_fail(hapd, count,", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"bss_load_update_period\": \"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\",", "open mode and external ifconfig down\"\"\" params = { \"ssid\":", "client polling. 
dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'up'])", "start\") id = dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" %", "1 elif pvb == 0 and state == 1: state", "id_str\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\",", "pvb = int(l, 16) if pvb > 0 and state", "and add the interface again subprocess.call(['brctl', 'addbr', br_ifname]) subprocess.call(['brctl', 'setfd',", "test_ap_open_assoc_timeout(dev, apdev): \"\"\"AP timing out association\"\"\" ssid = \"test\" hapd", "apdev[1], i, \"=wpa_driver_nl80211_drv_init\") # eloop_register_read_sock() call from i802_init() hapd_out_of_mem(hapd, apdev[1],", "raise Exception(\"No result reported\") if \"CTRL-EVENT-CONNECTED\" in ev: raise Exception(\"Unexpected", "ev: raise Exception(\"Unexpected connection\") dev[0].select_network(\"any\") dev[0].wait_connected(timeout=10) def test_ap_open_unexpected_assoc_event(dev, apdev): \"\"\"AP", "elif pvb == 0 and state == 1: state =", "\"\"\"AP with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "resp['payload'] = struct.pack('<HHH', 0, 2, 0) hapd.mgmt_tx(resp) assoc = 0", "The following wait tests beacon loss detection in mac80211 on", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with open mode", "break req = None if not req: raise Exception(\"Authentication frame", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"start_disabled\": \"1\" }) bssid =", "to start\") id = dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\"", "False for i in range(30): if \"connect@\" not in dev.request(\"RADIO_WORK", "scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This will be accepted due", "wait timed out\") if req['subtype'] == 0: assoc += 1", "hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15) def hapd_out_of_mem(hapd, apdev,", "ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname)", "br_ifname, '0']) subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'up']) subprocess.call(['iw', ifname,", "# This software may be distributed under the terms of", "mode and select any network\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "\"\"\"BSSID black/white list\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) bssid1 = apdev[0]['bssid'] hapd2", "\"open\", \"ignore_probe_probability\": \"0.5\", \"ignore_auth_probability\": \"0.5\", \"ignore_assoc_probability\": \"0.5\", \"ignore_reassoc_probability\": \"0.5\" }", "only_add_network=True) dev[0].select_network(id) ev = dev[0].wait_event([\"CTRL-EVENT-NETWORK-NOT-FOUND\", \"CTRL-EVENT-CONNECTED\"], timeout=10) if ev is", "again subprocess.call(['brctl', 'addbr', br_ifname]) subprocess.call(['brctl', 'setfd', br_ifname, '0']) subprocess.call(['ip', 'link',", "apdev[0]['ifname'], 'down']) ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=10) if ev is None:", "mac80211 on dev0. 
    # dev1 is used to test stopping of AP side functionality on client polling.
    resp['sa'] = req['da']
    resp['bssid'] = req['bssid']
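    # The Authentication response payload packs three little-endian
    # 16-bit fields: algorithm 0 (open system), transaction sequence 2,
    # status code 0 (success).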
scan_freq=\"2412\") bssid = apdev[0]['bssid'] cmd", "i in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, \"=wpa_driver_nl80211_drv_init\") # eloop_register_read_sock()", "hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open2\" }) bssid2 = apdev[1]['bssid']", "bssid): dev.scan_for_bss(bssid, freq=2412) work_id = dev.request(\"RADIO_WORK add block-work\") ev =", "dev.request(\"RADIO_WORK done \" + work_id) ok = False for i", "if ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (1)\") ev", "'link', 'set', 'dev', apdev[0]['ifname'], 'down']) ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=10) if", "= int(l, 16) if pvb > 0 and state ==", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") for i", "dev[0].request(\"BLACKLIST\") if bssid1 in res or bssid2 in res: raise", "Exception(\"No INTERFACE-ENABLED event\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def test_ap_open_disconnect_in_ps(dev, apdev, params):", "dev[0].select_network(\"any\") dev[0].wait_connected(timeout=10) def test_ap_open_unexpected_assoc_event(dev, apdev): \"\"\"AP with open mode and", "to unexpected AP\") if dev[1].get_status_field('bssid') != apdev[0]['bssid']: raise Exception(\"dev[1] connected", "test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with open mode and beaconing disabled (2)\"\"\"", "# inject some traffic sa = hapd.own_addr() da = dev[0].own_addr()", "test_ap_open(dev, apdev): \"\"\"AP with open mode (no security) configuration\"\"\" hapd", "Exception(\"dev[1] connected to unexpected AP\") if dev[2].get_status_field('bssid') != apdev[0]['bssid']: raise", "with open mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "dev[0].wait_connected(timeout=15) dev[0].dump_monitor() dev[0].request(\"REMOVE_NETWORK all\") dev[0].wait_disconnected(timeout=5) dev[0].dump_monitor() # This will result", "apdev): \"\"\"AP with open mode and beaconing disabled\"\"\" hapd =", "not completed\") ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is not", "block-work\") ev = dev.wait_event([\"EXT-RADIO-WORK-START\"]) if ev is None: raise Exception(\"Timeout", "{} {} freq=2412 action=765432\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd): raise", "in disconnection due to no matching network subprocess.call(['iw', 'dev', dev[0].ifname,", "id_str=\"foo\", wait_connect=False) ev = dev[0].wait_connected(timeout=10) if \"id_str=foo\" not in ev:", "AP and wpas interface in a bridge\"\"\" br_ifname='sta-br0' ifname='wlan5' try:", "test_ap_open_reconnect_on_inactivity_disconnect(dev, apdev): \"\"\"Reconnect to open mode AP after inactivity related", "pass if started: raise Exception(\"hostapd interface started even with memory", "of Beacon frames time.sleep(0.3) # disconnect - with traffic pending", "'link', 'set', 'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev = hapd.wait_event([\"INTERFACE-ENABLED\"],", "Exception(\"Association Request frames not received: assoc=%d\" % assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\")", "mode configuration and invalid WMM Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=5) if ev is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED", "\"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr()", "subprocess.call(['ip', 
                         'link', 'set', 'dev', br_ifname, 'down'])
        subprocess.call(['brctl', 'delif', br_ifname, ifname])
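        # Clean up in reverse order of setup: take the port out of the
        # bridge before deleting the bridge itself.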
Exception(\"No result", "params): \"\"\"Disconnect with the client in PS to regression-test a", "hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {} {} 0'.format(da, sa)) hapd.request('DATA_TEST_CONFIG 0') #", "\"open\" }) def test_bssid_black_white_list(dev, apdev): \"\"\"BSSID black/white list\"\"\" hapd =", "apdev[0]['bssid'] hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open2\" }) bssid2 =", "], timeout=5) if ev is None: raise Exception(\"No connection event", "{} freq=2412 action=1100\".format(bssid, bssid) if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could", "= apdev[0]['bssid'] dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") subprocess.call(['ip', 'link',", "\" + apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") if dev[0].get_status_field('bssid') != apdev[1]['bssid']:", "wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled(dev, apdev): \"\"\"AP with", "# eloop_register_read_sock() call from i802_init() hapd_out_of_mem(hapd, apdev[1], 1, \"eloop_sock_table_add_sock;eloop_register_sock;?eloop_register_read_sock;=i802_init\") #", "dev[0].get_bss(bssid) is not None: raise Exception(\"AP was seen beaconing\") if", "def sta_enable_disable(dev, bssid): dev.scan_for_bss(bssid, freq=2412) work_id = dev.request(\"RADIO_WORK add block-work\")", "unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\",", "client in PS to regression-test a kernel bug\"\"\" hapd =", "does # not fail hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) def", "% id) dev.request(\"RADIO_WORK done \" + work_id) ok = False", "not in dev.request(\"RADIO_WORK show\"): ok = True break time.sleep(0.1) if", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) hapd2 = hostapd.add_ap(apdev[1]['ifname'],", "Next, add the bridge interface and add the interface again", "16) if pvb > 0 and state == 0: state", "ev) if dev[0].get_status_field(\"id_str\") != \"foo\": raise Exception(\"id_str mismatch\") def test_ap_open_select_any(dev,", "\"ssid\": \"open\" }) hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\") for i in", "is not None: raise Exception(\"AP was seen beaconing\") if \"OK\"", "# not fail hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) def test_bssid_black_white_list(dev,", "AP-STA-DISCONNECTED (1)\") ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=5) if ev is None:", "dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\") def test_ap_open_assoc_timeout(dev, apdev): \"\"\"AP timing out", "verify that a new interface can still be added when", "\"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() #", "hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_iface_alloc\") for i in range(1, 5): hapd_out_of_mem(hapd,", "packet loss\"\"\" params = { \"ssid\": \"open\", \"ignore_probe_probability\": \"0.5\", \"ignore_auth_probability\":", "'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_connected(timeout=15) dev[0].dump_monitor() dev[0].request(\"REMOVE_NETWORK all\")", "with alloc_fail(hapd, count, func): started = False try: hostapd.add_ap(apdev['ifname'], {", "mode and id_str\"\"\" hapd = 
hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "in res or bssid2 in res: raise Exception(\"Unexpected blacklist entry(2)\")", "hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") if", "- with traffic pending - shouldn't cause kernel warnings dev[0].request(\"DISCONNECT\")", "is not None: raise Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def", "memory allocation does # not fail hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\"", "state == 1: state = 2 if state != 2:", "in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i, \"=wpa_driver_nl80211_drv_init\") # eloop_register_read_sock() call", "\"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", id_str=\"foo\", wait_connect=False) ev =", "def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "currently, # but run through the channel survey update couple", "blacklist entry\") hwsim_utils.test_connectivity(dev[0], hapd1) dev[0].select_network(id2) dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd2) res =", "\"ssid\": \"open\" }) bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid) wpas =", "raise Exception(\"AP did not ack Action frame\") def test_ap_open_invalid_wmm_action(dev, apdev):", "couple of times for i in range(0, 10): hwsim_utils.test_connectivity(dev[0], hapd)", "'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "open mode configuration and unknown Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def test_ap_open_wpas_in_bridge(dev, apdev): \"\"\"Open mode", "sa)) hapd.request('DATA_TEST_CONFIG 0') # let the AP send couple of", "and id_str\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "\"foo\": raise Exception(\"id_str mismatch\") def test_ap_open_select_any(dev, apdev): \"\"\"AP with open", "logging logger = logging.getLogger() import struct import subprocess import time", "{ \"ssid\": \"open\", \"bss_load_update_period\": \"10\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") #", "run_tshark(os.path.join(params['logdir'], \"hwsim0.pcapng\"), \"wlan_mgt.tim.partial_virtual_bitmap\", [\"wlan_mgt.tim.partial_virtual_bitmap\"]) if out is not None: state", "in ev: raise Exception(\"AP did not ack Action frame\") def", "if started: raise Exception(\"hostapd interface started even with memory allocation", "0) hapd.mgmt_tx(resp) assoc = 0 for i in range(0, 10):", "scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" % id) if \"connect@\" not in", "raise Exception(\"MGMT RX wait timed out\") if req['subtype'] == 11:", "WpaSupplicant(global_iface='/tmp/wpas-wlan5') # First, try a failure case of adding an", "scan_freq=\"2412\") def test_ap_open_start_disabled(dev, apdev): \"\"\"AP with open mode and beaconing", "with the client in PS to regression-test a kernel bug\"\"\"", "# The following wait tests beacon loss detection in mac80211", "ssid = \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" })", "dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with", "\"ssid\": 
\"open\", \"ap_max_inactivity\": \"1\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") ev", "wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] + \"", "ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr', 'off']) def", "done \" + work_id) ok = False for i in", "work not completed\") ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is", "if ev is None: raise Exception(\"Timeout on MGMT-TX-STATUS\") if \"result=SUCCESS\"", "{ \"ssid\": \"open\" }) bssid = apdev[0]['bssid'] sta_enable_disable(dev[0], bssid) wpas", "'dev', dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev):", "raise Exception(\"Unexpected connection\") dev.request(\"DISCONNECT\") def test_ap_open_sta_enable_disable(dev, apdev): \"\"\"AP with open", "dev[0].scan(freq=\"2412\") hapd.set(\"ext_mgmt_frame_handling\", \"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in", "side functionality on client polling. dev[1].request(\"REMOVE_NETWORK all\") subprocess.call(['ip', 'link', 'set',", "for i in range(1, 3): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_iface_alloc\") for", "dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \"", "wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5') # First, try a failure case of", "dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def sta_enable_disable(dev, bssid): dev.scan_for_bss(bssid, freq=2412) work_id =", "subprocess import time import os import hostapd import hwsim_utils from", "\"\"\"Open mode connection and SELECT_NETWORK to change network\"\"\" hapd1 =", "scan_freq=\"2412\", bg_scan_period=\"0\") for i in range(2): hapd.request(\"DISABLE\") dev[0].wait_disconnected() hapd.request(\"ENABLE\") dev[0].wait_connected()", "Request frames not received: assoc=%d\" % assoc) hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15)", "shouldn't cause kernel warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out", "bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\",", "apdev): \"\"\"hostapd failing to setup interface due to allocation failure\"\"\"", "if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception(\"dev[0] connected to unexpected AP\")", "range(0, 10): req = hapd.mgmt_rx() if req is None: raise", "= hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if ev is None: raise Exception(\"No INTERFACE-ENABLED", "with memory allocation failure: \" + arg) def test_ap_open_out_of_memory(dev, apdev):", "cause kernel warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out =", "\"\"\"AP timing out association\"\"\" ssid = \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "a bridge\"\"\" br_ifname='sta-br0' ifname='wlan5' try: 
        _test_ap_open_wpas_in_bridge(dev, apdev)
    finally:
        subprocess.call(['ip', 'link',
dev[1].request(\"REMOVE_NETWORK", "disconnection due to no matching network subprocess.call(['iw', 'dev', dev[0].ifname, 'connect',", "bg_scan_period=\"0\") ev = hapd.wait_event([ \"AP-STA-CONNECTED\" ], timeout=5) if ev is", "\"open\", \"ap_max_inactivity\": \"1\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid =", "+ \" aa:bb:cc:dd:ee:ff\") if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception(\"dev[0] connected", "started: raise Exception(\"hostapd interface started even with memory allocation failure:", "None: raise Exception(\"No INTERFACE-DISABLED event\") # The following wait tests", "failure case of adding an interface try: wpas.interface_add(ifname, br_ifname=br_ifname) raise", "\"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") bssid = apdev[0]['bssid'] cmd =", "hapd.request(\"ENABLE\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def sta_enable_disable(dev, bssid): dev.scan_for_bss(bssid, freq=2412) work_id", "to missing bridge interface: \" + str(e)) else: raise #", "hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr() + \" reason=4\") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout", "if \"FAIL\" in dev[0].request(cmd): raise Exception(\"Could not send test Action", "warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'], \"hwsim0.pcapng\"),", "let the AP send couple of Beacon frames time.sleep(0.3) #", "This will result in disconnection due to no matching network", "apdev): \"\"\"AP with open mode getting disabled and re-enabled\"\"\" hapd", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") bssid = apdev[0]['bssid']", "== 11: break req = None if not req: raise", "received from hostapd\") def test_ap_open_packet_loss(dev, apdev): \"\"\"AP with open mode", "and state == 0: state = 1 elif pvb ==", "scan_freq=\"2412\", only_add_network=True) dev[0].select_network(id) ev = dev[0].wait_event([\"CTRL-EVENT-NETWORK-NOT-FOUND\", \"CTRL-EVENT-CONNECTED\"], timeout=10) if ev", "event\") # The following wait tests beacon loss detection in", "test_ap_open_wpas_in_bridge(dev, apdev): \"\"\"Open mode AP and wpas interface in a", "and large packet loss\"\"\" params = { \"ssid\": \"open\", \"ignore_probe_probability\":", "in PS to regression-test a kernel bug\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "\"hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_driver_init\") for i in range(1, 4):", "dev[0].scan(freq=2412, only_new=True) if dev[0].get_bss(bssid) is not None: raise Exception(\"AP was", "def test_bssid_black_white_list(dev, apdev): \"\"\"BSSID black/white list\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "bridge interface: \" + str(e)) else: raise # Next, add", "% id) if \"connect@\" not in dev.request(\"RADIO_WORK show\"): raise Exception(\"connect", "all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"11:22:33:44:55:66/ff:00:00:00:00:00", "key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") for i in range(2): hapd.request(\"DISABLE\") dev[0].wait_disconnected() hapd.request(\"ENABLE\")", "raise Exception(\"UPDATE_BEACON failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", 
scan_freq=\"2412\") if \"OK\"", "failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP", "all\") dev[0].wait_disconnected(timeout=5) dev[0].dump_monitor() # This will result in disconnection due", "hapd.mgmt_tx(resp) assoc = 0 for i in range(0, 10): req", "= hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5) if ev is None: raise Exception(\"No INTERFACE-DISABLED", "\"ssid\": \"open\", \"start_disabled\": \"1\" }) bssid = apdev[0]['bssid'] dev[0].flush_scan_cache() dev[0].scan(freq=2412,", "Exception(\"Authentication frame not received\") resp = {} resp['fc'] = req['fc']", "fail hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) def test_bssid_black_white_list(dev, apdev): \"\"\"BSSID", "test_ap_open_unexpected_assoc_event(dev, apdev): \"\"\"AP with open mode and unexpected association event\"\"\"", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].scan(freq=\"2412\") hapd.set(\"ext_mgmt_frame_handling\", \"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\",", "\"Failed to add\" in str(e): logger.info(\"Ignore expected interface_add failure due", "failure: \" + arg) def test_ap_open_out_of_memory(dev, apdev): \"\"\"hostapd failing to", "req['subtype'] == 0: assoc += 1 if assoc == 3:", "Exception(\"CTRL-EVENT-CONNECT did not have matching id_str: \" + ev) if", "traffic sa = hapd.own_addr() da = dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX", "not None: raise Exception(\"Unexpected dev[2] connectin\") dev[2].request(\"REMOVE_NETWORK all\") def test_ap_open_wpas_in_bridge(dev,", "Exception(\"No connection event received from hostapd\") time.sleep(0.2) hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_MANUAL_POLL) try:", "raise Exception(\"Timeout on MGMT-TX-STATUS\") if \"result=SUCCESS\" not in ev: raise", "bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') != apdev[1]['bssid']: raise Exception(\"dev[0] connected to unexpected", "work to start\") id = dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK", "range(1, 5): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_config_alloc\")", "'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd = hostapd.add_ap(apdev[0]['ifname'], {", "apdev[1]['bssid'] id1 = dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) id2 = dev[0].connect(\"open2\",", "(state=%d)\" % state) def test_ap_open_select_network(dev, apdev): \"\"\"Open mode connection and", "if req['subtype'] == 11: break req = None if not", "ev is None: raise Exception(\"No result reported\") if \"CTRL-EVENT-CONNECTED\" in", "hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected() dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev,", "out association\"\"\" ssid = \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":", "== 3: break if assoc != 3: raise Exception(\"Association Request", "dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): \"\"\"AP with open mode and external", "cmd = \"MGMT_TX {} {} freq=2412 action=1100\".format(bssid, bssid) if \"FAIL\"", "is None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (1)\") ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"],", "with open mode 
configuration and invalid WMM Action frame\"\"\" hapd", "dev.request(\"DISCONNECT\") def test_ap_open_sta_enable_disable(dev, apdev): \"\"\"AP with open mode and wpa_supplicant", "\"open\" }) hapd2 = hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\",", "dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10) if ev is None or \"result=SUCCESS\" not in", "br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled(dev, apdev):", "connection\") dev.request(\"DISCONNECT\") def test_ap_open_sta_enable_disable(dev, apdev): \"\"\"AP with open mode and", "open mode AP after inactivity related disconnection\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'],", "did not have matching id_str: \" + ev) if dev[0].get_status_field(\"id_str\")", "!= apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\")", "all\") dev[1].request(\"REMOVE_NETWORK all\") dev[2].request(\"REMOVE_NETWORK all\") dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00\", wait_connect=False)", "{ \"ssid\": \"open\" }) id = dev[0].connect(\"unknown\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True)", "= hapd.mgmt_rx() if req is None: raise Exception(\"MGMT RX wait", "1 if assoc == 3: break if assoc != 3:", "= hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) br_ifname='sta-br0' ifname='wlan5' wpas =", "dev[2].wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev is not None: raise Exception(\"Unexpected dev[2]", "dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected() dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): \"\"\"AP with open", "started even with memory allocation failure: \" + arg) def", "with open mode and external ifconfig down\"\"\" params = {", "'addif', br_ifname, ifname]) wpas.interface_add(ifname, br_ifname=br_ifname) wpas.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled(dev,", "not req: raise Exception(\"Authentication frame not received\") resp = {}", "dev[0].request(\"RECONNECT\") dev[0].wait_connected() def test_ap_open_ifdown(dev, apdev): \"\"\"AP with open mode and", "= hostapd.add_ap(apdev[0]['ifname'], params) for i in range(0, 3): dev[i].connect(\"open\", key_mgmt=\"NONE\",", "add the bridge interface and add the interface again subprocess.call(['brctl',", "for more details. 
import logging logger = logging.getLogger() import struct", "\"hostapd_alloc_bss_data\") for i in range(1, 3): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_iface_alloc\")", "bssid_blacklist=\"11:22:33:44:55:66/ff:00:00:00:00:00 \" + apdev[1]['bssid'] + \" aa:bb:cc:dd:ee:ff\") if dev[0].get_status_field('bssid') !=", "'dev', apdev[0]['ifname'], 'down']) ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=10) if ev is", "and beaconing disabled\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\", \"start_disabled\":", "in str(e): logger.info(\"Ignore expected interface_add failure due to missing bridge", "sa = hapd.own_addr() da = dev[0].own_addr() hapd.request('DATA_TEST_CONFIG 1') hapd.request('DATA_TEST_TX {}", "test_ap_open_disable_enable(dev, apdev): \"\"\"AP with open mode getting disabled and re-enabled\"\"\"", "not in hapd.request(\"RELOAD\"): raise Exception(\"RELOAD failed\") dev[0].scan_for_bss(bssid, freq=2412) dev[0].connect(\"open\", key_mgmt=\"NONE\",", "bssid_blacklist=apdev[1]['bssid']) dev[2].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bssid_whitelist=\"00:00:00:00:00:00/00:00:00:00:00:00\", bssid_blacklist=apdev[1]['bssid']) if dev[0].get_status_field('bssid') != apdev[1]['bssid']:", "None: raise Exception(\"No connection event received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd)", "ev: raise Exception(\"AP did not ack Action frame\") def test_ap_open_reconnect_on_inactivity_disconnect(dev,", "{ \"ssid\": \"open\", \"ap_max_inactivity\": \"1\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params)", "apdev): \"\"\"AP with open mode and select any network\"\"\" hapd", "{ \"ssid\": \"open2\" }) bssid2 = apdev[1]['bssid'] id1 = dev[0].connect(\"open\",", "event received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([", "hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open2\" }) bssid2 = apdev[1]['bssid'] id1 =", "due to missing bridge interface: \" + str(e)) else: raise", "in dev[0].request(cmd): raise Exception(\"Could not send test Action frame\") ev", "is None: raise Exception(\"No connection event received from hostapd\") time.sleep(0.2)", "1, \"hostapd_alloc_bss_data\") for i in range(1, 3): hapd_out_of_mem(hapd, apdev[1], i,", "\"AP-STA-CONNECTED\" ], timeout=5) if ev is None: raise Exception(\"No connection", "apdev[0]['bssid']: raise Exception(\"dev[1] connected to unexpected AP\") if dev[2].get_status_field('bssid') !=", "beaconing\") if \"OK\" not in hapd.request(\"UPDATE_BEACON\"): raise Exception(\"UPDATE_BEACON failed\") dev[0].scan_for_bss(bssid,", "unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") ev = dev[2].wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1)", "import subprocess import time import os import hostapd import hwsim_utils", "!= apdev[1]['bssid']: raise Exception(\"dev[0] connected to unexpected AP\") if dev[1].get_status_field('bssid')", "received from hostapd\") hwsim_utils.test_connectivity(dev[0], hapd) dev[0].request(\"DISCONNECT\") ev = hapd.wait_event([ \"AP-STA-DISCONNECTED\"", "[\"wlan_mgt.tim.partial_virtual_bitmap\"]) if out is not None: state = 0 for", "\"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_driver_init\") for", "try: wpas.interface_add(ifname, br_ifname=br_ifname) raise Exception(\"Interface addition succeeded unexpectedly\") except Exception,", "the bridge 
interface and add the interface again subprocess.call(['brctl', 'addbr',", "not have matching id_str: \" + ev) if dev[0].get_status_field(\"id_str\") !=", "'up']) subprocess.call(['iw', ifname, 'set', '4addr', 'on']) subprocess.check_call(['brctl', 'addif', br_ifname, ifname])", "res: raise Exception(\"Unexpected blacklist entry(2)\") def test_ap_open_disable_enable(dev, apdev): \"\"\"AP with", "Qualcomm Atheros, Inc. # # This software may be distributed", "+ \" reason=4\") dev[0].wait_disconnected(timeout=5) dev[0].wait_connected(timeout=2, error=\"Timeout on reconnection\") def test_ap_open_assoc_timeout(dev,", "time.sleep(0.2) out = run_tshark(os.path.join(params['logdir'], \"hwsim0.pcapng\"), \"wlan_mgt.tim.partial_virtual_bitmap\", [\"wlan_mgt.tim.partial_virtual_bitmap\"]) if out is", "0, 2, 0) hapd.mgmt_tx(resp) assoc = 0 for i in", "resp = {} resp['fc'] = req['fc'] resp['da'] = req['sa'] resp['sa']", "timeout=5) if ev is None: raise Exception(\"No connection event received", "{ \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", bg_scan_period=\"0\") ev =", "def test_ap_open_start_disabled2(dev, apdev): \"\"\"AP with open mode and beaconing disabled", "blacklist entry(2)\") def test_ap_open_disable_enable(dev, apdev): \"\"\"AP with open mode getting", "id = dev.connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", only_add_network=True) dev.request(\"ENABLE_NETWORK %d\" % id)", "range(2): hapd.request(\"DISABLE\") dev[0].wait_disconnected() hapd.request(\"ENABLE\") dev[0].wait_connected() hwsim_utils.test_connectivity(dev[0], hapd) def sta_enable_disable(dev, bssid):", "# This will result in disconnection due to no matching", "allocation does # not fail hostapd.add_ap(apdev[1]['ifname'], { \"ssid\": \"open\" })", "ev is None: raise Exception(\"Timeout while waiting radio work to", "mode and wpa_supplicant ENABLE/DISABLE_NETWORK\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\"", "# # This software may be distributed under the terms", "id2 = dev[0].connect(\"open2\", key_mgmt=\"NONE\", scan_freq=\"2412\") hwsim_utils.test_connectivity(dev[0], hapd2) dev[0].select_network(id1) dev[0].wait_connected() res", "hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\") for i in range(1, 3): hapd_out_of_mem(hapd,", "1, \"hostapd_driver_init\") for i in range(1, 4): hapd_out_of_mem(hapd, apdev[1], i,", "mode configuration and unknown Action frame\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", "if req['subtype'] == 0: assoc += 1 if assoc ==", "= dev[0].wait_event([\"CTRL-EVENT-NETWORK-NOT-FOUND\", \"CTRL-EVENT-CONNECTED\"], timeout=10) if ev is None: raise Exception(\"No", "dev.request(\"RADIO_WORK add block-work\") ev = dev.wait_event([\"EXT-RADIO-WORK-START\"]) if ev is None:", "+ str(e)) else: raise # Next, add the bridge interface", "See README for more details. 
import logging logger = logging.getLogger()", "the client in PS to regression-test a kernel bug\"\"\" hapd", "!= 3: raise Exception(\"Association Request frames not received: assoc=%d\" %", "failure\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) hapd_out_of_mem(hapd, apdev[1],", "reconnection\") def test_ap_open_assoc_timeout(dev, apdev): \"\"\"AP timing out association\"\"\" ssid =", "scan_freq=\"2412\") dev[1].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") subprocess.call(['ip', 'link', 'set', 'dev', apdev[0]['ifname'], 'down'])", "try: _test_ap_open_wpas_in_bridge(dev, apdev) finally: subprocess.call(['ip', 'link', 'set', 'dev', br_ifname, 'down'])", "test Action frame\") ev = dev[0].wait_event([\"MGMT-TX-STATUS\"], timeout=10) if ev is", "is None: raise Exception(\"Timeout on MGMT-TX-STATUS\") if \"result=SUCCESS\" not in", "hapd.set(\"ext_mgmt_frame_handling\", \"1\") dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in range(0,", "hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) bssid = apdev[0]['bssid']", "\"1\" } hapd = hostapd.add_ap(apdev[0]['ifname'], params) bssid = apdev[0]['bssid'] dev[0].connect(\"open\",", "\"open\" }) hapd_out_of_mem(hapd, apdev[1], 1, \"hostapd_alloc_bss_data\") for i in range(1,", "hapd.set(\"ext_mgmt_frame_handling\", \"0\") dev[0].wait_connected(timeout=15) def test_ap_open_id_str(dev, apdev): \"\"\"AP with open mode", "if ev is not None: raise Exception(\"Unexpected connection\") dev.request(\"DISCONNECT\") def", "dev.wait_event([\"EXT-RADIO-WORK-START\"]) if ev is None: raise Exception(\"Timeout while waiting radio", "Exception(\"dev[1] connected to unexpected AP\") dev[0].request(\"REMOVE_NETWORK all\") dev[1].request(\"REMOVE_NETWORK all\") ev", "the AP send couple of Beacon frames time.sleep(0.3) # disconnect", "- shouldn't cause kernel warnings dev[0].request(\"DISCONNECT\") finally: hwsim_utils.set_powersave(dev[0], hwsim_utils.PS_DISABLED) time.sleep(0.2)", "dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15) dev[0].dump_monitor() # This will be accepted due to", "on AP-STA-DISCONNECTED (1)\") ev = hapd.wait_event([\"AP-STA-DISCONNECTED\"], timeout=5) if ev is", "hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15) def hapd_out_of_mem(hapd, apdev, count, func): with", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") dev[0].request(\"DISCONNECT\") dev[0].wait_disconnected(timeout=15)", "br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr', 'off']) def _test_ap_open_wpas_in_bridge(dev, apdev): hapd", "dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") hapd.request(\"DEAUTHENTICATE \" + dev[0].p2p_interface_addr() + \" reason=4\")", "timeout=5) if ev is None: raise Exception(\"No disconnection event received", "br_ifname, ifname]) subprocess.call(['brctl', 'delbr', br_ifname]) subprocess.call(['iw', ifname, 'set', '4addr', 'off'])", "apdev): \"\"\"AP with open mode configuration and unknown Action frame\"\"\"", "str(e): logger.info(\"Ignore expected interface_add failure due to missing bridge interface:", "configuration and large packet loss\"\"\" params = { \"ssid\": \"open\",", "apdev, count, func): with alloc_fail(hapd, count, func): started = False", "from wpasupplicant import WpaSupplicant def test_ap_open(dev, apdev): \"\"\"AP with open", "timing out association\"\"\" ssid = \"test\" hapd = hostapd.add_ap(apdev[0]['ifname'], {", 
"is None: raise Exception(\"No INTERFACE-DISABLED event\") # The following wait", "reported\") if \"CTRL-EVENT-CONNECTED\" in ev: raise Exception(\"Unexpected connection\") dev[0].select_network(\"any\") dev[0].wait_connected(timeout=10)", "0 for l in out.splitlines(): pvb = int(l, 16) if", "None: raise Exception(\"No result reported\") if \"CTRL-EVENT-CONNECTED\" in ev: raise", "dev[0].ifname, 'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev): \"\"\"AP", "in range(1, 5): hapd_out_of_mem(hapd, apdev[1], i, \"hostapd_config_defaults;hostapd_config_alloc\") hapd_out_of_mem(hapd, apdev[1], 1,", "hostapd.add_ap(apdev[0]['ifname'], { \"ssid\": \"open\" }) dev[0].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\") bssid =", "None: raise Exception(\"Timeout on AP-STA-DISCONNECTED (2)\") ev = hapd.wait_event([\"INTERFACE-DISABLED\"], timeout=5)", "# but run through the channel survey update couple of", "'connect', 'open', \"2412\", apdev[0]['bssid']]) dev[0].wait_disconnected(timeout=15) def test_ap_bss_load(dev, apdev): \"\"\"AP with", "'dev', apdev[0]['ifname'], 'up']) dev[0].wait_disconnected() dev[1].wait_disconnected() ev = hapd.wait_event([\"INTERFACE-ENABLED\"], timeout=10) if", "range(0, 10): hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) hwsim_utils.test_connectivity(dev[0], hapd) time.sleep(0.15) def", "dev1 is used to test stopping of AP side functionality", "None: raise Exception(\"Unexpected connection\") dev.request(\"DISCONNECT\") def test_ap_open_sta_enable_disable(dev, apdev): \"\"\"AP with", "Exception(\"No result reported\") if \"CTRL-EVENT-CONNECTED\" in ev: raise Exception(\"Unexpected connection\")", "key_mgmt=\"NONE\", scan_freq=\"2412\") def test_ap_open_start_disabled(dev, apdev): \"\"\"AP with open mode and", "in range(0, 3): dev[i].wait_connected(timeout=20) def test_ap_open_unknown_action(dev, apdev): \"\"\"AP with open", "in range(0, 3): dev[i].connect(\"open\", key_mgmt=\"NONE\", scan_freq=\"2412\", wait_connect=False) for i in", "radio work not completed\") ev = dev.wait_event([\"CTRL-EVENT-CONNECTED\"], timeout=0.1) if ev", "break if assoc != 3: raise Exception(\"Association Request frames not", "mode and unexpected association event\"\"\" hapd = hostapd.add_ap(apdev[0]['ifname'], { \"ssid\":" ]
[ "= nn.Linear(100, 1) def forward(self, input_): a1 = self.fc1(input_) h1", "def __init__(self, input_size): super(RegModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1", "y class MultiModel(nn.Module): expected_target_type = torch.LongTensor def __init__(self, input_size, output_size):", "out def train_epoch(model, opt, criterion, X, y, batch_size=50): model.train() losses", "= optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion = nn.MSELoss() return reg_model, reg_opt, reg_criterion", "gradients loss.backward() # (4) update weights opt.step() losses.append(loss.data.numpy()) return losses", "= list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep numeric features, zero-impute any missing", "zero-impute any missing values # obviously this is a very", "list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep numeric features, zero-impute any missing values", "= Path(output_dir_path) / filename torch.save(model, output_file_path) def subset_data(X): numerics =", "criterion, X, y, batch_size=50): model.train() losses = [] for beg_i", "super(BinModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1 = nn.ReLU() self.dout", "#!/usr/bin/env python # coding: utf-8 # pylint: disable-all from __future__", "class_criterion def build_regressor(X): reg_model = RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001)", "y_hat will be (batch_size, 1) dim, so coerce target to", "this is a very rudimentary approach to handling missing values", "reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion = nn.MSELoss() return reg_model, reg_opt,", "# a more sophisticated imputer can be implemented by making", "output_file_path = Path(output_dir_path) / filename torch.save(model, output_file_path) def subset_data(X): numerics", "def subset_data(X): numerics = [\"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\"]", "return y class RegModel(nn.Module): def __init__(self, input_size): super(RegModel, self).__init__() self.fc1", "numerics = [\"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\"] # exclude", "LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y =", "be implemented by making use of custom transform, load, and", "bin_t_X, bin_t_y) def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5): reg_t_X", "= self.out(out) return out def train_epoch(model, opt, criterion, X, y,", "8) self.relu = nn.ReLU() self.layer2 = nn.Linear(8, output_size) self.out =", "torch.nn as nn import torch.optim as optim class BinModel(nn.Module): expected_target_type", "RegModel(nn.Module): def __init__(self, input_size): super(RegModel, self).__init__() self.fc1 = nn.Linear(input_size, 50)", "num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep numeric features, zero-impute any", "y, batch_size=50): model.train() losses = [] for beg_i in range(0,", "self.layer2(out) out = self.out(out) return out def train_epoch(model, opt, criterion,", "= self.fc2(dout) h2 = self.prelu(a2) a3 = self.out(h2) y =", "bin_t_y) def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5): reg_t_X =", "torch from torch.autograd import Variable import torch.nn as nn import", "# keep numeric features, zero-impute any missing values # obviously", "y = self.out(h2) return y class MultiModel(nn.Module): 
expected_target_type = torch.LongTensor", "= y[beg_i : beg_i + batch_size].reshape(-1, 1) x_batch = Variable(x_batch)", "range(n_epochs): train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y) def train_regressor(X, y, reg_model,", "n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X =", "self).__init__() self.layer1 = nn.Linear(input_size, 8) self.relu = nn.ReLU() self.layer2 =", "= [\"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\"] # exclude any", "nn.Linear(100, 1) def forward(self, input_): a1 = self.fc1(input_) h1 =", "for e in range(n_epochs): train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y) def", "\"float32\", \"float64\"] # exclude any completely-missing columns when checking for", "out = self.layer1(input_) out = self.relu(out) out = self.layer2(out) out", "class_model, class_opt, class_criterion def build_regressor(X): reg_model = RegModel(X.shape[1]) reg_opt =", "reg_opt, reg_criterion, reg_t_X, reg_t_y) def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"): output_file_path =", "reg_model, reg_opt, reg_criterion def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5):", "X.size(0), batch_size): x_batch = X[beg_i : beg_i + batch_size, :]", "= self.prelu(a2) y = self.out(h2) return y class MultiModel(nn.Module): expected_target_type", "(2) Compute diff loss = criterion(y_hat, y_batch) # (3) Compute", "y = self.out_act(a3) return y class RegModel(nn.Module): def __init__(self, input_size):", "when checking for numerics num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep", "return reg_model, reg_opt, reg_criterion def train_classifier(X, y, class_model, class_opt, class_criterion,", "import torch from torch.autograd import Variable import torch.nn as nn", "# coding: utf-8 # pylint: disable-all from __future__ import absolute_import", "self.fc1 = nn.Linear(input_size, 50) self.relu1 = nn.ReLU() self.dout = nn.Dropout(0.2)", "def __init__(self, input_size, output_size): super(MultiModel, self).__init__() self.layer1 = nn.Linear(input_size, 8)", "the same y_batch = y[beg_i : beg_i + batch_size].reshape(-1, 1)", "beg_i + batch_size].reshape(-1, 1) x_batch = Variable(x_batch) y_batch = Variable(y_batch)", "# (1) Forward y_hat = model(x_batch) # (2) Compute diff", "torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e in range(n_epochs): train_epoch(class_model, class_opt,", "as nn import torch.optim as optim class BinModel(nn.Module): expected_target_type =", "pathlib import Path import torch from torch.autograd import Variable import", "= RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion = nn.MSELoss() return", "approach to handling missing values # a more sophisticated imputer", "by making use of custom transform, load, and predict hooks", "will be (batch_size, 1) dim, so coerce target to look", "checking for numerics num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep numeric", "= nn.Sigmoid() def forward(self, input_): a1 = self.fc1(input_) h1 =", "import LabelEncoder from pathlib import Path import torch from torch.autograd", "= nn.Dropout(0.2) self.fc2 = nn.Linear(50, 100) self.prelu = nn.PReLU(1) self.out", "= criterion(y_hat, y_batch) # (3) Compute gradients loss.backward() # (4)", "y_batch) # (3) 
Compute gradients loss.backward() # (4) update weights", "reg_criterion def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5): target_encoder =", "train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y", "y_batch = Variable(y_batch) opt.zero_grad() # (1) Forward y_hat = model(x_batch)", "in range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y) def save_torch_model(model, output_dir_path,", "self.layer2 = nn.Linear(8, output_size) self.out = nn.Softmax() def forward(self, input_):", "completely-missing columns when checking for numerics num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns)", "# (3) Compute gradients loss.backward() # (4) update weights opt.step()", "== 2 else nn.CrossEntropyLoss() return class_model, class_opt, class_criterion def build_regressor(X):", "self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1) def forward(self, input_):", "making use of custom transform, load, and predict hooks return", "be (batch_size, 1) dim, so coerce target to look the", "nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss() return class_model, class_opt,", "filename torch.save(model, output_file_path) def subset_data(X): numerics = [\"int16\", \"int32\", \"int64\",", "self.out = nn.Linear(100, 1) self.out_act = nn.Sigmoid() def forward(self, input_):", "losses def build_classifier(X, num_labels): class_model = BinModel(X.shape[1]) if num_labels ==", "50) self.relu1 = nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2 = nn.Linear(50,", "numerics num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) # keep numeric features, zero-impute", "a3 = self.out(h2) y = self.out_act(a3) return y class RegModel(nn.Module):", "in range(0, X.size(0), batch_size): x_batch = X[beg_i : beg_i +", "Variable(x_batch) y_batch = Variable(y_batch) opt.zero_grad() # (1) Forward y_hat =", "Variable import torch.nn as nn import torch.optim as optim class", "else nn.CrossEntropyLoss() return class_model, class_opt, class_criterion def build_regressor(X): reg_model =", "= self.out(h2) return y class MultiModel(nn.Module): expected_target_type = torch.LongTensor def", "return out def train_epoch(model, opt, criterion, X, y, batch_size=50): model.train()", "def forward(self, input_): out = self.layer1(input_) out = self.relu(out) out", "dout = self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) y", "num_labels == 2 else MultiModel(X.shape[1], num_labels) class_opt = optim.Adam(class_model.parameters(), lr=0.001)", "to handling missing values # a more sophisticated imputer can", "= torch.FloatTensor def __init__(self, input_size): super(BinModel, self).__init__() self.fc1 = nn.Linear(input_size,", "= self.fc2(dout) h2 = self.prelu(a2) y = self.out(h2) return y", "batch_size=50): model.train() losses = [] for beg_i in range(0, X.size(0),", "how=\"all\").select_dtypes(include=numerics).columns) # keep numeric features, zero-impute any missing values #", "= nn.Softmax() def forward(self, input_): out = self.layer1(input_) out =", "import torch.optim as optim class BinModel(nn.Module): expected_target_type = torch.FloatTensor def", "__init__(self, input_size): super(BinModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1 =", "disable-all from __future__ import absolute_import from sklearn.preprocessing import LabelEncoder from", "nn.Softmax() def forward(self, input_): out = self.layer1(input_) 
out = self.relu(out)", "= nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2 = nn.Linear(50, 100) self.prelu", "RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion = nn.MSELoss() return reg_model,", "self.layer1 = nn.Linear(input_size, 8) self.relu = nn.ReLU() self.layer2 = nn.Linear(8,", "= optim.Adam(class_model.parameters(), lr=0.001) class_criterion = nn.BCELoss() if num_labels == 2", "range(0, X.size(0), batch_size): x_batch = X[beg_i : beg_i + batch_size,", "= self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) y =", "target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e in", "losses.append(loss.data.numpy()) return losses def build_classifier(X, num_labels): class_model = BinModel(X.shape[1]) if", "= self.prelu(a2) a3 = self.out(h2) y = self.out_act(a3) return y", "class_criterion = nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss() return", "torch.autograd import Variable import torch.nn as nn import torch.optim as", "y, class_model, class_opt, class_criterion, n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y", "= self.fc1(input_) h1 = self.relu1(a1) dout = self.dout(h1) a2 =", "dout = self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) a3", "else MultiModel(X.shape[1], num_labels) class_opt = optim.Adam(class_model.parameters(), lr=0.001) class_criterion = nn.BCELoss()", "nn.Linear(50, 100) self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1) self.out_act", "beg_i in range(0, X.size(0), batch_size): x_batch = X[beg_i : beg_i", "/ filename torch.save(model, output_file_path) def subset_data(X): numerics = [\"int16\", \"int32\",", "h2 = self.prelu(a2) y = self.out(h2) return y class MultiModel(nn.Module):", "BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels) class_opt =", "def build_classifier(X, num_labels): class_model = BinModel(X.shape[1]) if num_labels == 2", "class_criterion, n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X", ": beg_i + batch_size, :] # y_hat will be (batch_size,", "= self.out_act(a3) return y class RegModel(nn.Module): def __init__(self, input_size): super(RegModel,", "model.train() losses = [] for beg_i in range(0, X.size(0), batch_size):", "= model(x_batch) # (2) Compute diff loss = criterion(y_hat, y_batch)", "beg_i + batch_size, :] # y_hat will be (batch_size, 1)", "n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for e in", "opt, criterion, X, y, batch_size=50): model.train() losses = [] for", "1) dim, so coerce target to look the same y_batch", "= torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e in range(n_epochs): train_epoch(class_model,", "\"float64\"] # exclude any completely-missing columns when checking for numerics", "= target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e", "self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1) self.out_act = nn.Sigmoid()", "nn.ReLU() self.layer2 = nn.Linear(8, output_size) self.out = nn.Softmax() def forward(self,", "2 else MultiModel(X.shape[1], num_labels) class_opt = optim.Adam(class_model.parameters(), lr=0.001) 
class_criterion =", "== 2 else MultiModel(X.shape[1], num_labels) class_opt = optim.Adam(class_model.parameters(), lr=0.001) class_criterion", "torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for e in range(n_epochs): train_epoch(reg_model, reg_opt,", "self.relu1(a1) dout = self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2)", "output_dir_path, filename=\"torch_bin.pth\"): output_file_path = Path(output_dir_path) / filename torch.save(model, output_file_path) def", "target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type)", "= BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels) class_opt", "(3) Compute gradients loss.backward() # (4) update weights opt.step() losses.append(loss.data.numpy())", "x_batch = X[beg_i : beg_i + batch_size, :] # y_hat", "from __future__ import absolute_import from sklearn.preprocessing import LabelEncoder from pathlib", "optim.Adam(class_model.parameters(), lr=0.001) class_criterion = nn.BCELoss() if num_labels == 2 else", "__init__(self, input_size): super(RegModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1 =", "reg_t_y) def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"): output_file_path = Path(output_dir_path) / filename", "= nn.Linear(input_size, 8) self.relu = nn.ReLU() self.layer2 = nn.Linear(8, output_size)", "self.fc2(dout) h2 = self.prelu(a2) y = self.out(h2) return y class", "def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5): target_encoder = LabelEncoder()", "input_size): super(BinModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1 = nn.ReLU()", "bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e in range(n_epochs): train_epoch(class_model, class_opt, class_criterion,", "handling missing values # a more sophisticated imputer can be", "100) self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1) self.out_act =", "h1 = self.relu1(a1) dout = self.dout(h1) a2 = self.fc2(dout) h2", "train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y)", "nn.Linear(8, output_size) self.out = nn.Softmax() def forward(self, input_): out =", "train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y) def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"): output_file_path", "rudimentary approach to handling missing values # a more sophisticated", "= X[beg_i : beg_i + batch_size, :] # y_hat will", "nn.CrossEntropyLoss() return class_model, class_opt, class_criterion def build_regressor(X): reg_model = RegModel(X.shape[1])", "range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y) def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"):", "import torch.nn as nn import torch.optim as optim class BinModel(nn.Module):", "optim class BinModel(nn.Module): expected_target_type = torch.FloatTensor def __init__(self, input_size): super(BinModel,", "nn.Linear(100, 1) self.out_act = nn.Sigmoid() def forward(self, input_): a1 =", "\"float16\", \"float32\", \"float64\"] # exclude any completely-missing columns when checking", "class_model, class_opt, class_criterion, n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y =", "weights opt.step() 
losses.append(loss.data.numpy()) return losses def build_classifier(X, num_labels): class_model =", "num_labels == 2 else nn.CrossEntropyLoss() return class_model, class_opt, class_criterion def", "def train_epoch(model, opt, criterion, X, y, batch_size=50): model.train() losses =", "class MultiModel(nn.Module): expected_target_type = torch.LongTensor def __init__(self, input_size, output_size): super(MultiModel,", "import absolute_import from sklearn.preprocessing import LabelEncoder from pathlib import Path", "def forward(self, input_): a1 = self.fc1(input_) h1 = self.relu1(a1) dout", "class_opt, class_criterion def build_regressor(X): reg_model = RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(),", "def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)", "class BinModel(nn.Module): expected_target_type = torch.FloatTensor def __init__(self, input_size): super(BinModel, self).__init__()", "reg_model = RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion = nn.MSELoss()", "= torch.LongTensor def __init__(self, input_size, output_size): super(MultiModel, self).__init__() self.layer1 =", "reg_opt, reg_criterion def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5): target_encoder", "# (2) Compute diff loss = criterion(y_hat, y_batch) # (3)", "from torch.autograd import Variable import torch.nn as nn import torch.optim", "__init__(self, input_size, output_size): super(MultiModel, self).__init__() self.layer1 = nn.Linear(input_size, 8) self.relu", "nn.Linear(input_size, 8) self.relu = nn.ReLU() self.layer2 = nn.Linear(8, output_size) self.out", "X, y, batch_size=50): model.train() losses = [] for beg_i in", "LabelEncoder from pathlib import Path import torch from torch.autograd import", "absolute_import from sklearn.preprocessing import LabelEncoder from pathlib import Path import", "filename=\"torch_bin.pth\"): output_file_path = Path(output_dir_path) / filename torch.save(model, output_file_path) def subset_data(X):", "a very rudimentary approach to handling missing values # a", "self.relu(out) out = self.layer2(out) out = self.out(out) return out def", "= Variable(x_batch) y_batch = Variable(y_batch) opt.zero_grad() # (1) Forward y_hat", "a2 = self.fc2(dout) h2 = self.prelu(a2) y = self.out(h2) return", "= self.layer1(input_) out = self.relu(out) out = self.layer2(out) out =", "e in range(n_epochs): train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y) def train_regressor(X,", "from pathlib import Path import torch from torch.autograd import Variable", "input_): a1 = self.fc1(input_) h1 = self.relu1(a1) dout = self.dout(h1)", "look the same y_batch = y[beg_i : beg_i + batch_size].reshape(-1,", "self.out(out) return out def train_epoch(model, opt, criterion, X, y, batch_size=50):", "def __init__(self, input_size): super(BinModel, self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1", "coding: utf-8 # pylint: disable-all from __future__ import absolute_import from", "utf-8 # pylint: disable-all from __future__ import absolute_import from sklearn.preprocessing", "coerce target to look the same y_batch = y[beg_i :", "torch.LongTensor def __init__(self, input_size, output_size): super(MultiModel, self).__init__() self.layer1 = nn.Linear(input_size,", "= self.relu(out) out = self.layer2(out) out = self.out(out) return out", "nn.PReLU(1) self.out = nn.Linear(100, 1) self.out_act = nn.Sigmoid() def forward(self,", 
"batch_size, :] # y_hat will be (batch_size, 1) dim, so", "BinModel(nn.Module): expected_target_type = torch.FloatTensor def __init__(self, input_size): super(BinModel, self).__init__() self.fc1", "import Path import torch from torch.autograd import Variable import torch.nn", "input_size, output_size): super(MultiModel, self).__init__() self.layer1 = nn.Linear(input_size, 8) self.relu =", "reg_opt, reg_criterion, n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for", "e in range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y) def save_torch_model(model,", "reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for e in range(n_epochs):", "= nn.Linear(input_size, 50) self.relu1 = nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2", "= nn.PReLU(1) self.out = nn.Linear(100, 1) self.out_act = nn.Sigmoid() def", "1) x_batch = Variable(x_batch) y_batch = Variable(y_batch) opt.zero_grad() # (1)", "from sklearn.preprocessing import LabelEncoder from pathlib import Path import torch", "def build_regressor(X): reg_model = RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion", "= torch.from_numpy(y.values).type(torch.FloatTensor) for e in range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X,", "loss.backward() # (4) update weights opt.step() losses.append(loss.data.numpy()) return losses def", "class_opt, class_criterion, bin_t_X, bin_t_y) def train_regressor(X, y, reg_model, reg_opt, reg_criterion,", ": beg_i + batch_size].reshape(-1, 1) x_batch = Variable(x_batch) y_batch =", "self.fc1(input_) h1 = self.relu1(a1) dout = self.dout(h1) a2 = self.fc2(dout)", "return losses def build_classifier(X, num_labels): class_model = BinModel(X.shape[1]) if num_labels", "= nn.Linear(50, 100) self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1)", "transformed_y = target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type) for", "missing values # obviously this is a very rudimentary approach", "for beg_i in range(0, X.size(0), batch_size): x_batch = X[beg_i :", "super(MultiModel, self).__init__() self.layer1 = nn.Linear(input_size, 8) self.relu = nn.ReLU() self.layer2", "a more sophisticated imputer can be implemented by making use", "# y_hat will be (batch_size, 1) dim, so coerce target", "update weights opt.step() losses.append(loss.data.numpy()) return losses def build_classifier(X, num_labels): class_model", "expected_target_type = torch.FloatTensor def __init__(self, input_size): super(BinModel, self).__init__() self.fc1 =", "build_classifier(X, num_labels): class_model = BinModel(X.shape[1]) if num_labels == 2 else", "columns when checking for numerics num_features = list(X.dropna(axis=1, how=\"all\").select_dtypes(include=numerics).columns) #", "= self.relu1(a1) dout = self.dout(h1) a2 = self.fc2(dout) h2 =", "a1 = self.fc1(input_) h1 = self.relu1(a1) dout = self.dout(h1) a2", "batch_size].reshape(-1, 1) x_batch = Variable(x_batch) y_batch = Variable(y_batch) opt.zero_grad() #", "(1) Forward y_hat = model(x_batch) # (2) Compute diff loss", "self.out = nn.Linear(100, 1) def forward(self, input_): a1 = self.fc1(input_)", "100) self.prelu = nn.PReLU(1) self.out = nn.Linear(100, 1) def forward(self,", "more sophisticated imputer can be implemented by making use of", 
"nn.Linear(input_size, 50) self.relu1 = nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2 =", "same y_batch = y[beg_i : beg_i + batch_size].reshape(-1, 1) x_batch", "self).__init__() self.fc1 = nn.Linear(input_size, 50) self.relu1 = nn.ReLU() self.dout =", "sophisticated imputer can be implemented by making use of custom", "output_size) self.out = nn.Softmax() def forward(self, input_): out = self.layer1(input_)", "1) def forward(self, input_): a1 = self.fc1(input_) h1 = self.relu1(a1)", "def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"): output_file_path = Path(output_dir_path) / filename torch.save(model,", "for e in range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y) def", "= self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) a3 =", "to look the same y_batch = y[beg_i : beg_i +", "= LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) bin_t_y", "train_epoch(model, opt, criterion, X, y, batch_size=50): model.train() losses = []", ":] # y_hat will be (batch_size, 1) dim, so coerce", "y_hat = model(x_batch) # (2) Compute diff loss = criterion(y_hat,", "exclude any completely-missing columns when checking for numerics num_features =", "torch.optim as optim class BinModel(nn.Module): expected_target_type = torch.FloatTensor def __init__(self,", "criterion(y_hat, y_batch) # (3) Compute gradients loss.backward() # (4) update", "batch_size): x_batch = X[beg_i : beg_i + batch_size, :] #", "= nn.Linear(8, output_size) self.out = nn.Softmax() def forward(self, input_): out", "2 else nn.CrossEntropyLoss() return class_model, class_opt, class_criterion def build_regressor(X): reg_model", "self.out(h2) return y class MultiModel(nn.Module): expected_target_type = torch.LongTensor def __init__(self,", "opt.zero_grad() # (1) Forward y_hat = model(x_batch) # (2) Compute", "imputer can be implemented by making use of custom transform,", "reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for e in range(n_epochs): train_epoch(reg_model, reg_opt, reg_criterion,", "lr=0.001) class_criterion = nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss()", "python # coding: utf-8 # pylint: disable-all from __future__ import", "reg_criterion, n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor) for e", "loss = criterion(y_hat, y_batch) # (3) Compute gradients loss.backward() #", "Compute gradients loss.backward() # (4) update weights opt.step() losses.append(loss.data.numpy()) return", "\"int32\", \"int64\", \"float16\", \"float32\", \"float64\"] # exclude any completely-missing columns", "nn.PReLU(1) self.out = nn.Linear(100, 1) def forward(self, input_): a1 =", "class_criterion, bin_t_X, bin_t_y) def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5):", "# exclude any completely-missing columns when checking for numerics num_features", "class_opt, class_criterion, n_epochs=5): target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y)", "self.dout = nn.Dropout(0.2) self.fc2 = nn.Linear(50, 100) self.prelu = nn.PReLU(1)", "self.out_act = nn.Sigmoid() def forward(self, input_): a1 = self.fc1(input_) h1", "obviously this is a very rudimentary approach to handling missing", "target to look the same y_batch = y[beg_i : beg_i", "(4) update weights opt.step() losses.append(loss.data.numpy()) return losses def build_classifier(X, 
num_labels):", "pylint: disable-all from __future__ import absolute_import from sklearn.preprocessing import LabelEncoder", "y, reg_model, reg_opt, reg_criterion, n_epochs=5): reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor) reg_t_y =", "= [] for beg_i in range(0, X.size(0), batch_size): x_batch =", "diff loss = criterion(y_hat, y_batch) # (3) Compute gradients loss.backward()", "torch.from_numpy(transformed_y).type(class_model.expected_target_type) for e in range(n_epochs): train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y)", "= nn.MSELoss() return reg_model, reg_opt, reg_criterion def train_classifier(X, y, class_model,", "Variable(y_batch) opt.zero_grad() # (1) Forward y_hat = model(x_batch) # (2)", "[\"int16\", \"int32\", \"int64\", \"float16\", \"float32\", \"float64\"] # exclude any completely-missing", "any missing values # obviously this is a very rudimentary", "any completely-missing columns when checking for numerics num_features = list(X.dropna(axis=1,", "so coerce target to look the same y_batch = y[beg_i", "numeric features, zero-impute any missing values # obviously this is", "nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2 = nn.Linear(50, 100) self.prelu =", "reg_criterion = nn.MSELoss() return reg_model, reg_opt, reg_criterion def train_classifier(X, y,", "target_encoder = LabelEncoder() target_encoder.fit(y) transformed_y = target_encoder.transform(y) bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)", "use of custom transform, load, and predict hooks return X[num_features].fillna(0)", "model(x_batch) # (2) Compute diff loss = criterion(y_hat, y_batch) #", "values # obviously this is a very rudimentary approach to", "self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) a3 = self.out(h2)", "build_regressor(X): reg_model = RegModel(X.shape[1]) reg_opt = optim.Adam(reg_model.parameters(), lr=0.001) reg_criterion =", "keep numeric features, zero-impute any missing values # obviously this", "out = self.out(out) return out def train_epoch(model, opt, criterion, X,", "= self.out(h2) y = self.out_act(a3) return y class RegModel(nn.Module): def", "self.layer1(input_) out = self.relu(out) out = self.layer2(out) out = self.out(out)", "self.dout(h1) a2 = self.fc2(dout) h2 = self.prelu(a2) y = self.out(h2)", "<reponame>andreakropp/datarobot-user-models #!/usr/bin/env python # coding: utf-8 # pylint: disable-all from", "reg_t_X, reg_t_y) def save_torch_model(model, output_dir_path, filename=\"torch_bin.pth\"): output_file_path = Path(output_dir_path) /", "forward(self, input_): a1 = self.fc1(input_) h1 = self.relu1(a1) dout =", "x_batch = Variable(x_batch) y_batch = Variable(y_batch) opt.zero_grad() # (1) Forward", "if num_labels == 2 else MultiModel(X.shape[1], num_labels) class_opt = optim.Adam(class_model.parameters(),", "as optim class BinModel(nn.Module): expected_target_type = torch.FloatTensor def __init__(self, input_size):", "[] for beg_i in range(0, X.size(0), batch_size): x_batch = X[beg_i", "X[beg_i : beg_i + batch_size, :] # y_hat will be", "class_model = BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels)", "= nn.PReLU(1) self.out = nn.Linear(100, 1) def forward(self, input_): a1", "self.relu1 = nn.ReLU() self.dout = nn.Dropout(0.2) self.fc2 = nn.Linear(50, 100)", "missing values # a more sophisticated imputer can be implemented", "Path import torch from torch.autograd import Variable import torch.nn as", "implemented by making use of custom transform, load, and predict", "self.out(h2) y = 
# pylint: disable-all
from __future__ import absolute_import

from sklearn.preprocessing import LabelEncoder
from pathlib import Path

import torch
from torch.autograd import Variable  # "from ... import Variable" in the shards; torch.autograd assumed
import torch.nn as nn
import torch.optim as optim


class BinModel(nn.Module):
    expected_target_type = torch.FloatTensor

    def __init__(self, input_size):
        super(BinModel, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.relu1 = nn.ReLU()
        self.dout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(50, 100)
        self.prelu = nn.PReLU(1)
        self.out = nn.Linear(100, 1)
        self.out_act = nn.Sigmoid()

    def forward(self, input_):
        a1 = self.fc1(input_)
        h1 = self.relu1(a1)
        dout = self.dout(h1)
        a2 = self.fc2(dout)
        h2 = self.prelu(a2)
        a3 = self.out(h2)
        y = self.out_act(a3)
        return y


class RegModel(nn.Module):
    def __init__(self, input_size):
        super(RegModel, self).__init__()
        self.fc1 = nn.Linear(input_size, 50)
        self.relu1 = nn.ReLU()
        self.dout = nn.Dropout(0.2)
        self.fc2 = nn.Linear(50, 100)
        self.prelu = nn.PReLU(1)
        self.out = nn.Linear(100, 1)

    def forward(self, input_):
        a1 = self.fc1(input_)
        h1 = self.relu1(a1)
        dout = self.dout(h1)
        a2 = self.fc2(dout)
        h2 = self.prelu(a2)
        y = self.out(h2)
        return y


class MultiModel(nn.Module):
    expected_target_type = torch.LongTensor

    def __init__(self, input_size, output_size):
        super(MultiModel, self).__init__()
        self.layer1 = nn.Linear(input_size, 8)
        self.relu = nn.ReLU()
        self.layer2 = nn.Linear(8, output_size)
        # NB: CrossEntropyLoss (used below) expects raw logits; applying
        # Softmax here still runs but weakens the gradients.
        self.out = nn.Softmax()

    def forward(self, input_):
        out = self.layer1(input_)
        out = self.relu(out)
        out = self.layer2(out)
        out = self.out(out)
        return out


def train_epoch(model, opt, criterion, X, y, batch_size=50):  # default batch size assumed
    losses = []
    for beg_i in range(0, X.size(0), batch_size):
        x_batch = X[beg_i : beg_i + batch_size, :]
        # y_hat will be (batch_size, 1) dim, so coerce target to look the same
        y_batch = y[beg_i : beg_i + batch_size].reshape(-1, 1)
        x_batch = Variable(x_batch)
        y_batch = Variable(y_batch)
        opt.zero_grad()
        # (1) Forward
        y_hat = model(x_batch)
        # (2) Compute diff
        loss = criterion(y_hat, y_batch)
        # (3) Compute gradients
        loss.backward()
        # (4) update weights
        opt.step()
        losses.append(loss.data.numpy())
    return losses


def build_classifier(X, num_labels):
    class_model = BinModel(X.shape[1]) if num_labels == 2 else MultiModel(X.shape[1], num_labels)
    class_opt = optim.Adam(class_model.parameters(), lr=0.001)
    class_criterion = nn.BCELoss() if num_labels == 2 else nn.CrossEntropyLoss()
    return class_model, class_opt, class_criterion


def build_regressor(X):
    reg_model = RegModel(X.shape[1])
    reg_opt = optim.Adam(reg_model.parameters(), lr=0.001)
    reg_criterion = nn.MSELoss()
    return reg_model, reg_opt, reg_criterion


def train_classifier(X, y, class_model, class_opt, class_criterion, n_epochs=5):
    # Label-encode the targets; this line is restored by inference, since
    # LabelEncoder is imported and `transformed_y` is otherwise undefined.
    transformed_y = LabelEncoder().fit_transform(y)
    bin_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)
    bin_t_y = torch.from_numpy(transformed_y).type(class_model.expected_target_type)
    for e in range(n_epochs):
        train_epoch(class_model, class_opt, class_criterion, bin_t_X, bin_t_y)


def train_regressor(X, y, reg_model, reg_opt, reg_criterion, n_epochs=5):
    reg_t_X = torch.from_numpy(X.values).type(torch.FloatTensor)
    reg_t_y = torch.from_numpy(y.values).type(torch.FloatTensor)
    for e in range(n_epochs):
        train_epoch(reg_model, reg_opt, reg_criterion, reg_t_X, reg_t_y)


def save_torch_model(model, output_dir_path, filename="torch_bin.pth"):
    output_file_path = Path(output_dir_path) / filename
    torch.save(model, output_file_path)


def subset_data(X):
    numerics = ["int16", "int32", "int64", "float16", "float32", "float64"]
    # exclude any completely-missing columns when searching for numerics
    num_features = list(X.dropna(axis=1, how="all").select_dtypes(include=numerics).columns)
    # keep numeric features, zero-impute any missing values
    # obviously this is a very rudimentary approach to handling missing values
    # a more sophisticated imputer can be implemented by making use of custom transform, load,
    return X[num_features].fillna(0)  # restored by inference from the comments above
[ "file where the weather should be saved. A relative path", "historical daily # weather to replay during the simulation. dome_closed_frac", "= num_nights * steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day,", "& (night_mjd < dome_open_at) self._table['open'][sl][closed] = False self.start_date = start_date", "# Dome is always closed before dusk and after dawn.", "then closed for the rest of the night. # 3.", "as np import astropy.time import astropy.table import astropy.units as u", "seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step", "being closed equal to extra_random_close_fraction. This is intended to include", "scenarios 1+2 with probability equal to the closed fraction. #", "self.num_nights = num_nights self.steps_per_day = steps_per_day self.replay = replay def", "the simulated weather is requested. Returns ------- table slice Slice", "True. \"\"\" config = desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite)", "= config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date", "weather with seed={} replay=\"{}\".' .format(seed, replay)) gen = np.random.RandomState(seed) #", "rest of the night. # 3. open and dusk and", "config to set any unspecified dates. start_date = config.first_day() stop_date", "isinstance(time_step, u.Quantity): time_step = time_step * u.min self.log = desiutil.log.get_logger()", "Parameters ---------- seed : int Random number seed to use", "restore is not None: fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname)", "= t0 + (np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd'] =", "desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows) / float(steps_per_day)) * u.day", "the night when the dome reopens. dome_open_at = bright_dusk[i] +", "else: self.log.info('Generating random weather with seed={} replay=\"{}\".' .format(seed, replay)) gen", "< extra_downtime] = 1. # Convert fractions of scheduled time", "each night of the survey. # This step is deterministic", "Time step calculating updates. Must evenly divide 24 hours. If", "1 history = ['Y{}'.format(year) for year in range(2007, 2018)] replay", "|= (night_mjd > bright_dawn[i] - dome_closed_time[i]) else: # Dome closed", "tabulated time, rather than using interpolation. Parameters ---------- time :", "stop_date self.num_nights = num_nights self.steps_per_day = steps_per_day self.replay = replay", "in range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup", "'Y2010,Y2012'. Replayed weather will be used cyclically if necessary. Random", "All other parameters are ignored when this is provided. A", "# Lookup the dome closed fractions for each night of", "= start_date self.stop_date = stop_date self.num_nights = num_nights self.steps_per_day =", "frac. Use the value of r[i] # as the fractional", "simulated conditions include seeing, transparency and the dome-open fraction. \"\"\"", "closed equal to extra_random_close_fraction. This is intended to include margin.", "Lookup the dome closed fractions for each night of the", "of the night. # 3. open and dusk and dawn,", "night. This occurs with probability frac / 2. 
closed[:] =", "the chance of the night being closed equal to extra_random_close_fraction.", "be replayed, e.g. 'Y2010,Y2012'. Replayed weather will be used cyclically", "float or :class:`astropy.units.Quantity`, optional Time step calculating updates. Must evenly", "','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the dome closed fractions for", "probability frac / 2. closed[:] = True elif r[i] <", "with probability 1 - frac. Use the value of r[i]", "is intended to include margin. \"\"\" def __init__(self, seed=1, replay='random',", "at the closest tabulated time, rather than using interpolation. Parameters", "dome_closed_time[i]) else: # Dome closed during the middle of the", "# Use a fixed number of random numbers to decouple", "to decouple from the seeing # and transparency sampling below.", "Randomly pick between three scenarios for partially closed nights: #", "desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils class Weather(object): \"\"\"Simulate", "# This step is deterministic and only depends on the", "equal to extra_random_close_fraction. This is intended to include margin. \"\"\"", "= np.ones(num_rows, bool) for i in range(num_nights): sl = slice(i", ">= dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed] = False self.start_date", "transparency sampling below. self._table['open'] = np.ones(num_rows, bool) for i in", "historical weather years. years_to_simulate = config.last_day().year - config.first_day().year + 1", "ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn - bright_dusk) # Randomly", "path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close the dome completely", "desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore is", "closed |= (night_mjd > bright_dawn[i] - dome_closed_time[i]) else: # Dome", "+ 1 history = ['Y{}'.format(year) for year in range(2007, 2018)]", "all night. pass elif dome_closed_frac[i] == 1: # Dome closed", "of scheduled time to hours per night. ilo, ihi =", "should be saved. A relative path name refers to the", "* steps_per_day, (i + 1) * steps_per_day) night_mjd = self._table['mjd'][sl]", "/ steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate", "reopens. dome_open_at = bright_dusk[i] + r[i] * (bright_dawn[i] - bright_dusk[i])", "an existing weather simulation from the specified file name. All", "gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1. #", "dome_closed_time[i] closed |= (night_mjd >= dome_closed_at) & (night_mjd < dome_open_at)", ": bool Silently overwrite any existing file when this is", "ephem = desisurvey.ephem.get_ephem() if restore is not None: fullname =", "atmospheric transparency time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32)", "random atmospheric seeing time series. dt_sec = 24 * 3600.", "closed = (night_mjd < bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if", "is requested. Returns ------- table slice Slice of precomputed table", "name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. 
extra_downtime :", "num_rows = num_nights * steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights,", "import datetime import numpy as np import astropy.time import astropy.table", "self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights =", "survey. # This step is deterministic and only depends on", "night. pass elif dome_closed_frac[i] == 1: # Dome closed all", "desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename))", "weather should be saved. A relative path name refers to", "years whose historical weather should be replayed, e.g. 'Y2010,Y2012'. Replayed", "steps_per_day) night_mjd = self._table['mjd'][sl] # Dome is always closed before", "night. # 2. open at dusk, then closed for the", "# Use our config to set any unspecified dates. start_date", "Time(s) when the simulated weather is requested. Returns ------- table", "astropy.time import astropy.table import astropy.units as u import desiutil.log import", "gen=gen).astype(np.float32) # Generate a random atmospheric transparency time series. self._table['transparency']", "REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize column of MJD timestamps.", "fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START'])", "probability equal to the closed fraction. # Use a fixed", "r[i] < 0.5 * dome_closed_frac[i]: # Dome closed during first", "weather will be used cyclically if necessary. Random weather will", "frac / 2. closed |= (night_mjd > bright_dawn[i] - dome_closed_time[i])", "range is taken from the survey config. Seeing and transparency", "dome_open_at - dome_closed_time[i] closed |= (night_mjd >= dome_closed_at) & (night_mjd", "sampling of all available years with historical weather data. Use", "import astropy.table import astropy.units as u import desiutil.log import desimodel.weather", "import astropy.units as u import desiutil.log import desimodel.weather import desisurvey.config", "- self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int) if np.any(offset <", "= self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return else: self.log.info('Generating random", "None: fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date(", "True elif r[i] < 0.5 * dome_closed_frac[i]: # Dome closed", "a file. The saved file can be restored using the", "if np.any(offset < 0) or np.any(offset > len(self._table)): raise ValueError('Cannot", "the constructor `restore` parameter. Parameters ---------- filename : str Name", "include seeing, transparency and the dome-open fraction. \"\"\" from __future__", ":meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite any", "= ['Y{}'.format(year) for year in range(2007, 2018)] replay = ','.join(gen.choice(history,", "open all night. pass elif dome_closed_frac[i] == 1: # Dome", "to {0}.'.format(filename)) def get(self, time): \"\"\"Get the weather conditions at", "pass elif dome_closed_frac[i] == 1: # Dome closed all night.", "as the fractional time during the night when the dome", "Parameters ---------- filename : str Name of the file where", "closed fraction. 
# Use a fixed number of random numbers", "self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random", "weather conditions. The simulated conditions include seeing, transparency and the", "= self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return", "if dome_closed_frac[i] == 0: # Dome open all night. pass", "weather to replay during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date,", "using the constructor `restore` parameter. Parameters ---------- filename : str", "(bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i] closed |=", "= times.mjd # Generate a random atmospheric seeing time series.", "config.first_day().year + 1 history = ['Y{}'.format(year) for year in range(2007,", "Must evenly divide 24 hours. If unitless float, will be", "numbers to decouple from the seeing # and transparency sampling", ": filename or None Restore an existing weather simulation from", "corresponding to the requested time(s). \"\"\" offset = np.floor( (time.mjd", "self._table['open'][sl][closed] = False self.start_date = start_date self.stop_date = stop_date self.num_nights", "stop_date, replay=replay) r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 <", "the conditions at the closest tabulated time, rather than using", "np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int) if", "import desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils class Weather(object):", "with a closed period during the night. # Pick scenarios", "range(num_nights): sl = slice(i * steps_per_day, (i + 1) *", "Initialize column of MJD timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times =", "fractions of scheduled time to hours per night. ilo, ihi", "the closed fraction. # Use a fixed number of random", "dome_closed_frac[i]: # Dome closed during last part of the night.", "decouple from the seeing # and transparency sampling below. self._table['open']", "self._table['mjd'][sl] # Dome is always closed before dusk and after", "self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return else:", "num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random atmospheric transparency time", "# This occurs with probability 1 - frac. Use the", "= stop_date self.num_nights = num_nights self.steps_per_day = steps_per_day self.replay =", "restore : filename or None Restore an existing weather simulation", "fixed number of random numbers to decouple from the seeing", "t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows) / float(steps_per_day))", "dome_closed_frac[i]: # Dome closed during first part of the night.", "< 0.5 * dome_closed_frac[i]: # Dome closed during first part", "<desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite any existing file when", "desisurvey.utils class Weather(object): \"\"\"Simulate weather conditions affecting observations. 
The start/stop", "start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2", "self.log.info('Saved weather to {0}.'.format(filename)) def get(self, time): \"\"\"Get the weather", "if num_nights <= 0: raise ValueError('Expected start_date < stop_date.') #", "are ignored when this is provided. A relative path name", "the night. # Pick scenarios 1+2 with probability equal to", "of r[i] # as the fractional time during the night", "for partially closed nights: # 1. closed from dusk, then", "config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def get(self, time):", "gen=gen).astype(np.float32) if replay == 'random': # Generate a bootstrap sampling", "partially closed nights: # 1. closed from dusk, then open", "margin. \"\"\" def __init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if", "night. # 3. open and dusk and dawn, with a", "seeing # and transparency sampling below. self._table['open'] = np.ones(num_rows, bool)", "24 hours. If unitless float, will be interpreted as minutes.", "= desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS']", "= int(round((1 * u.day / time_step).to(1).value)) if not np.allclose((steps_per_day *", "bright_dusk[i] + r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at", "of historical daily # weather to replay during the simulation.", "weather to a file. The saved file can be restored", ":class:`astropy.units.Quantity`, optional Time step calculating updates. Must evenly divide 24", "extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step = time_step * u.min", "self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return else: self.log.info('Generating", "= config.last_day() num_nights = (stop_date - start_date).days if num_nights <=", "of the historical weather years. years_to_simulate = config.last_day().year - config.first_day().year", "{0}.'.format(filename)) def get(self, time): \"\"\"Get the weather conditions at the", ": astropy.time.Time Time(s) when the simulated weather is requested. Returns", "or a comma-separated list of years whose historical weather should", "probability 1 - frac. Use the value of r[i] #", "Use the value of r[i] # as the fractional time", "weather should be replayed, e.g. 'Y2010,Y2012'. Replayed weather will be", "of the night. # 2. open at dusk, then closed", "the weather should be saved. A relative path name refers", "that the time step evenly divides 24 hours. steps_per_day =", "steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a", "before dusk and after dawn. closed = (night_mjd < bright_dusk[i])", "\"\"\"Simulate weather conditions affecting observations. The start/stop date range is", "not evenly divide 24 hours: {0}.' .format(time_step)) # Calculate the", "not isinstance(time_step, u.Quantity): time_step = time_step * u.min self.log =", "whose historical weather should be replayed, e.g. 'Y2010,Y2012'. Replayed weather", "intended to include margin. \"\"\" def __init__(self, seed=1, replay='random', time_step=5,", "astropy.table.Table(meta=meta) # Initialize column of MJD timestamps. 
t0 = desisurvey.utils.local_noon_on_date(start_date)", "import print_function, division, absolute_import from datetime import datetime import numpy", "self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname)) return else: self.log.info('Generating random weather", "randomly, with the chance of the night being closed equal", "e.g. 'Y2010,Y2012'. Replayed weather will be used cyclically if necessary.", "desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights)", "dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: # Dome closed during last", "frac / 2. closed[:] = True elif r[i] < 0.5", "# Generate a random atmospheric seeing time series. dt_sec =", "bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if dome_closed_frac[i] == 0: #", "desisurvey.config import desisurvey.ephem import desisurvey.utils class Weather(object): \"\"\"Simulate weather conditions", "to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally", "Generate a random atmospheric transparency time series. self._table['transparency'] = desimodel.weather.sample_transp(", "dome_closed_frac[r2 < extra_downtime] = 1. # Convert fractions of scheduled", "specified time(s). Returns the conditions at the closest tabulated time,", "bootstrap sampling of the historical weather years. years_to_simulate = config.last_day().year", "Seeing and transparency values are stored with 32-bit floats to", "time_step = time_step * u.min self.log = desiutil.log.get_logger() config =", "{}.'.format(fullname)) return else: self.log.info('Generating random weather with seed={} replay=\"{}\".' .format(seed,", "the worst-case weather scenario. time_step : float or :class:`astropy.units.Quantity`, optional", "closed from dusk, then open the rest of the night.", "or None Restore an existing weather simulation from the specified", "number seed to use to generate stochastic conditions. The seed", "to set any unspecified dates. start_date = config.first_day() stop_date =", "2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the dome", "transparency realization independent of the value of ``replay``. replay :", ": float Additionally close the dome completely on some nights.", "to the closed fraction. # Use a fixed number of", "24 hours: {0}.' .format(time_step)) # Calculate the number of times", "fraction. # Use a fixed number of random numbers to", "with probability frac / 2. closed |= (night_mjd > bright_dawn[i]", "simulated weather is requested. Returns ------- table slice Slice of", "closed nights: # 1. closed from dusk, then open the", "or np.any(offset > len(self._table)): raise ValueError('Cannot get weather beyond tabulated", "step is deterministic and only depends on the config weather", "during the middle of the night. # This occurs with", "as minutes. restore : filename or None Restore an existing", "np.any(offset < 0) or np.any(offset > len(self._table)): raise ValueError('Cannot get", "provided. A relative path name refers to the :meth:`configuration output", "dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights) r2", "* steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table", "random weather with seed={} replay=\"{}\".' 
.format(seed, replay)) gen = np.random.RandomState(seed)", "times.mjd # Generate a random atmospheric seeing time series. dt_sec", "self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS']", "of the survey. # This step is deterministic and only", "this is True. \"\"\" config = desisurvey.config.Configuration() filename = config.get_path(filename)", "config = desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather", "dome_closed_frac[i] == 1: # Dome closed all night. This occurs", "atmospheric seeing time series. dt_sec = 24 * 3600. /", "closed fractions for each night of the survey. # This", "of the night. # This occurs with probability 1 -", "relative path name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`.", "32-bit floats to save some memory. Parameters ---------- seed :", "the night. # This occurs with probability frac / 2.", "replay def save(self, filename, overwrite=True): \"\"\"Save the generated weather to", "extra_downtime : float Additionally close the dome completely on some", "year in range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) #", "than using interpolation. Parameters ---------- time : astropy.time.Time Time(s) when", "weather is requested. Returns ------- table slice Slice of precomputed", "gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1. # Convert fractions of", "to a file. The saved file can be restored using", "elif r[i] < dome_closed_frac[i]: # Dome closed during last part", "the specified file name. All other parameters are ignored when", "2. closed |= (night_mjd < bright_dusk[i] + dome_closed_time[i]) elif r[i]", "The saved file can be restored using the constructor `restore`", "Check that the time step evenly divides 24 hours. steps_per_day", "The seed determines the same seeing and transparency realization independent", "of all available years with historical weather data. Use 'Y2015'", "| (night_mjd >= bright_dawn[i]) if dome_closed_frac[i] == 0: # Dome", "series. dt_sec = 24 * 3600. / steps_per_day self._table['seeing'] =", "dome completely on some nights. Nights are chosen randomly, with", "transparency time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if", "Convert fractions of scheduled time to hours per night. ilo,", "= (night_mjd < bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if dome_closed_frac[i]", "worst-case weather scenario. time_step : float or :class:`astropy.units.Quantity`, optional Time", "with probability equal to the closed fraction. # Use a", "closed during the middle of the night. # This occurs", "dawn, with a closed period during the night. # Pick", "number of random numbers to decouple from the seeing #", "after dawn. closed = (night_mjd < bright_dusk[i]) | (night_mjd >=", "weather will be a boostrap sampling of all available years", "any unspecified dates. start_date = config.first_day() stop_date = config.last_day() num_nights", "the rest of the night. # 2. open at dusk,", "to extra_random_close_fraction. This is intended to include margin. \"\"\" def", "- ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn =", "the time step evenly divides 24 hours. 
steps_per_day = int(round((1", "\"\"\" from __future__ import print_function, division, absolute_import from datetime import", "is provided. A relative path name refers to the :meth:`configuration", "= (stop_date - start_date).days if num_nights <= 0: raise ValueError('Expected", "np.any(offset > len(self._table)): raise ValueError('Cannot get weather beyond tabulated range.')", "used cyclically if necessary. Random weather will be a boostrap", "(stop_date - start_date).days if num_nights <= 0: raise ValueError('Expected start_date", "# weather to replay during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions(", "of random numbers to decouple from the seeing # and", "int Random number seed to use to generate stochastic conditions.", "Random number seed to use to generate stochastic conditions. The", "False self.start_date = start_date self.stop_date = stop_date self.num_nights = num_nights", "be restored using the constructor `restore` parameter. Parameters ---------- filename", "\"\"\" offset = np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day +", "self.log.info('Generating random weather with seed={} replay=\"{}\".' .format(seed, replay)) gen =", "and the dome-open fraction. \"\"\" from __future__ import print_function, division,", "Additionally close the dome completely on some nights. Nights are", "divide 24 hours: {0}.' .format(time_step)) # Calculate the number of", "dome_open_at = bright_dusk[i] + r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at", "* (bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i] closed", "between three scenarios for partially closed nights: # 1. closed", "ignored when this is provided. A relative path name refers", "the specified time(s). Returns the conditions at the closest tabulated", "existing weather simulation from the specified file name. All other", "fractional time during the night when the dome reopens. dome_open_at", "the dome reopens. dome_open_at = bright_dusk[i] + r[i] * (bright_dawn[i]", "u.min self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem()", "with seed={} replay=\"{}\".' .format(seed, replay)) gen = np.random.RandomState(seed) # Use", "our config to set any unspecified dates. start_date = config.first_day()", "scheduled time to hours per night. ilo, ihi = (start_date", "/ time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError(", "None Restore an existing weather simulation from the specified file", "to hours per night. ilo, ihi = (start_date - ephem.start_date).days,", "are chosen randomly, with the chance of the night being", "Returns the conditions at the closest tabulated time, rather than", "+ 0.5 ).astype(int) if np.any(offset < 0) or np.any(offset >", "STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize column of MJD", "a random atmospheric seeing time series. dt_sec = 24 *", "time(s). Returns the conditions at the closest tabulated time, rather", "desisurvey.ephem import desisurvey.utils class Weather(object): \"\"\"Simulate weather conditions affecting observations.", "1. 
# Convert fractions of scheduled time to hours per", "weather from {}.'.format(fullname)) return else: self.log.info('Generating random weather with seed={}", "the same seeing and transparency realization independent of the value", "bool Silently overwrite any existing file when this is True.", "always closed before dusk and after dawn. closed = (night_mjd", "0: # Dome open all night. pass elif dome_closed_frac[i] ==", "\"\"\"Simulate stochastic observing weather conditions. The simulated conditions include seeing,", "desisurvey.ephem.get_ephem() if restore is not None: fullname = config.get_path(restore) self._table", "years_to_simulate, replace=True)) # Lookup the dome closed fractions for each", "\"\"\"Get the weather conditions at the specified time(s). Returns the", "# Generate a random atmospheric transparency time series. self._table['transparency'] =", "daily # weather to replay during the simulation. dome_closed_frac =", "is always closed before dusk and after dawn. closed =", "not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step does", "= self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather", "replay)) gen = np.random.RandomState(seed) # Use our config to set", "24 hours. steps_per_day = int(round((1 * u.day / time_step).to(1).value)) if", "from dusk, then open the rest of the night. #", ": int Random number seed to use to generate stochastic", "< 0) or np.any(offset > len(self._table)): raise ValueError('Cannot get weather", "on the config weather # parameter, which specifies which year(s)", "# as the fractional time during the night when the", "= np.random.RandomState(seed) # Use our config to set any unspecified", "(bright_dawn - bright_dusk) # Randomly pick between three scenarios for", "* u.day self._table['mjd'] = times.mjd # Generate a random atmospheric", "deterministic and only depends on the config weather # parameter,", "= time_step * u.min self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration()", "float(steps_per_day)) * u.day self._table['mjd'] = times.mjd # Generate a random", "the dome-open fraction. \"\"\" from __future__ import print_function, division, absolute_import", "float Additionally close the dome completely on some nights. Nights", "overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def get(self, time): \"\"\"Get the", "__init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity):", "<reponame>mlandriau/surveysim \"\"\"Simulate stochastic observing weather conditions. The simulated conditions include", "# Check that the time step evenly divides 24 hours.", "This occurs with probability 1 - frac. Use the value", "the weather conditions at the specified time(s). Returns the conditions", "divide 24 hours. If unitless float, will be interpreted as", "the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r =", "the generated weather to a file. The saved file can", "seed determines the same seeing and transparency realization independent of", "= self._table['mjd'][sl] # Dome is always closed before dusk and", "closed[:] = True elif r[i] < 0.5 * dome_closed_frac[i]: #", "parameters are ignored when this is provided. 
A relative path", "<= 0: raise ValueError('Expected start_date < stop_date.') # Check that", "self._table['open'] = np.ones(num_rows, bool) for i in range(num_nights): sl =", "memory. Parameters ---------- seed : int Random number seed to", "self._table = astropy.table.Table(meta=meta) # Initialize column of MJD timestamps. t0", ".format(time_step)) # Calculate the number of times where we will", "astropy.units as u import desiutil.log import desimodel.weather import desisurvey.config import", "conditions at the specified time(s). Returns the conditions at the", "from {}.'.format(fullname)) return else: self.log.info('Generating random weather with seed={} replay=\"{}\".'", "of times where we will tabulate the weather. num_rows =", "unitless float, will be interpreted as minutes. restore : filename", "weather # parameter, which specifies which year(s) of historical daily", "Returns ------- table slice Slice of precomputed table containing row(s)", "only depends on the config weather # parameter, which specifies", "of the night. # This occurs with probability frac /", "of years whose historical weather should be replayed, e.g. 'Y2010,Y2012'.", "Use our config to set any unspecified dates. start_date =", "time step evenly divides 24 hours. steps_per_day = int(round((1 *", "* time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step does not evenly", "value of r[i] # as the fractional time during the", "This occurs with probability frac / 2. closed[:] = True", "self.steps_per_day + 0.5 ).astype(int) if np.any(offset < 0) or np.any(offset", "seeing time series. dt_sec = 24 * 3600. / steps_per_day", "night when the dome reopens. dome_open_at = bright_dusk[i] + r[i]", "nights: # 1. closed from dusk, then open the rest", "some memory. Parameters ---------- seed : int Random number seed", "to use to generate stochastic conditions. The seed determines the", "unspecified dates. start_date = config.first_day() stop_date = config.last_day() num_nights =", "where we will tabulate the weather. num_rows = num_nights *", "(start_date - ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn", "* steps_per_day) night_mjd = self._table['mjd'][sl] # Dome is always closed", "(time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int) if np.any(offset", "= np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int)", "evenly divides 24 hours. steps_per_day = int(round((1 * u.day /", "saved. A relative path name refers to the :meth:`configuration output", "- dome_closed_time[i]) else: # Dome closed during the middle of", "of precomputed table containing row(s) corresponding to the requested time(s).", "and only depends on the config weather # parameter, which", "all available years with historical weather data. Use 'Y2015' for", "# Dome closed all night. This occurs with probability frac", "gen = np.random.RandomState(seed) # Use our config to set any", "|= (night_mjd >= dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed] =", "24 * 3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec,", "random atmospheric transparency time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec,", "hours. If unitless float, will be interpreted as minutes. restore", "hours. steps_per_day = int(round((1 * u.day / time_step).to(1).value)) if not", "closed before dusk and after dawn. 
closed = (night_mjd <", "rather than using interpolation. Parameters ---------- time : astropy.time.Time Time(s)", "survey config. Seeing and transparency values are stored with 32-bit", "time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step does not evenly divide", "available years with historical weather data. Use 'Y2015' for the", "refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float", "int(round((1 * u.day / time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value,", "replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the dome closed", "replay=replay) r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime]", "for the rest of the night. # 3. open and", "bool) for i in range(num_nights): sl = slice(i * steps_per_day,", "= config.first_day() stop_date = config.last_day() num_nights = (stop_date - start_date).days", "history = ['Y{}'.format(year) for year in range(2007, 2018)] replay =", "if not isinstance(time_step, u.Quantity): time_step = time_step * u.min self.log", "with the chance of the night being closed equal to", "other parameters are ignored when this is provided. A relative", "ValueError('Expected start_date < stop_date.') # Check that the time step", "r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i]", "start_date self.stop_date = stop_date self.num_nights = num_nights self.steps_per_day = steps_per_day", "start_date < stop_date.') # Check that the time step evenly", "hours per night. ilo, ihi = (start_date - ephem.start_date).days, (stop_date", "elif dome_closed_frac[i] == 1: # Dome closed all night. This", "# Randomly pick between three scenarios for partially closed nights:", "'Y2015' for the worst-case weather scenario. time_step : float or", "ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn -", "print_function, division, absolute_import from datetime import datetime import numpy as", "Silently overwrite any existing file when this is True. \"\"\"", "parameter. Parameters ---------- filename : str Name of the file", "determines the same seeing and transparency realization independent of the", "evenly divide 24 hours: {0}.' .format(time_step)) # Calculate the number", "cyclically if necessary. Random weather will be a boostrap sampling", "we will tabulate the weather. num_rows = num_nights * steps_per_day", "NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize column of", "# 3. open and dusk and dawn, with a closed", "def save(self, filename, overwrite=True): \"\"\"Save the generated weather to a", "if restore is not None: fullname = config.get_path(restore) self._table =", "= astropy.table.Table(meta=meta) # Initialize column of MJD timestamps. t0 =", "middle of the night. # This occurs with probability 1", "Use a fixed number of random numbers to decouple from", "transparency values are stored with 32-bit floats to save some", "the rest of the night. # 3. open and dusk", "> bright_dawn[i] - dome_closed_time[i]) else: # Dome closed during the", "necessary. Random weather will be a boostrap sampling of all", "start/stop date range is taken from the survey config. 
Seeing", "filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def", "restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step = time_step *", "dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed] = False self.start_date =", "\"\"\"Save the generated weather to a file. The saved file", "= gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1. # Convert fractions", "overwrite : bool Silently overwrite any existing file when this", "bright_dusk[i] + dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: # Dome closed", "= desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay", "Generate a random atmospheric seeing time series. dt_sec = 24", "weather simulation from the specified file name. All other parameters", "0: raise ValueError('Expected start_date < stop_date.') # Check that the", "|= (night_mjd < bright_dusk[i] + dome_closed_time[i]) elif r[i] < dome_closed_frac[i]:", "will tabulate the weather. num_rows = num_nights * steps_per_day meta", "desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random atmospheric transparency", "'random': # Generate a bootstrap sampling of the historical weather", "a comma-separated list of years whose historical weather should be", "in range(num_nights): sl = slice(i * steps_per_day, (i + 1)", "be interpreted as minutes. restore : filename or None Restore", "* self.steps_per_day + 0.5 ).astype(int) if np.any(offset < 0) or", "# Convert fractions of scheduled time to hours per night.", "to generate stochastic conditions. The seed determines the same seeing", "per night. ilo, ihi = (start_date - ephem.start_date).days, (stop_date -", "path name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime", "self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY']", "parameter, which specifies which year(s) of historical daily # weather", "be saved. A relative path name refers to the :meth:`configuration", "* u.min self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem =", "bright_dusk) # Randomly pick between three scenarios for partially closed", "year(s) of historical daily # weather to replay during the", "numpy as np import astropy.time import astropy.table import astropy.units as", "scenario. time_step : float or :class:`astropy.units.Quantity`, optional Time step calculating", "(night_mjd >= dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed] = False", "of ``replay``. replay : str Either 'random' or a comma-separated", "historical weather data. Use 'Y2015' for the worst-case weather scenario.", "interpreted as minutes. restore : filename or None Restore an", "probability frac / 2. closed |= (night_mjd < bright_dusk[i] +", "# parameter, which specifies which year(s) of historical daily #", "Dome closed during first part of the night. # This", "night_mjd = self._table['mjd'][sl] # Dome is always closed before dusk", "Calculate the number of times where we will tabulate the", "requested. 
Returns ------- table slice Slice of precomputed table containing", "Restore an existing weather simulation from the specified file name.", "steps_per_day self.replay = replay def save(self, filename, overwrite=True): \"\"\"Save the", "save some memory. Parameters ---------- seed : int Random number", "timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows) /", "range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the", "closed during last part of the night. # This occurs", "self.start_date = start_date self.stop_date = stop_date self.num_nights = num_nights self.steps_per_day", "years_to_simulate = config.last_day().year - config.first_day().year + 1 history = ['Y{}'.format(year)", "during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r", "ValueError( 'Requested time_step does not evenly divide 24 hours: {0}.'", "ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac", "datetime import numpy as np import astropy.time import astropy.table import", "boostrap sampling of all available years with historical weather data.", "dome_closed_frac * (bright_dawn - bright_dusk) # Randomly pick between three", "str Either 'random' or a comma-separated list of years whose", "time_step does not evenly divide 24 hours: {0}.' .format(time_step)) #", "does not evenly divide 24 hours: {0}.' .format(time_step)) # Calculate", "* 3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32)", "1.): raise ValueError( 'Requested time_step does not evenly divide 24", "import numpy as np import astropy.time import astropy.table import astropy.units", "generated weather to a file. The saved file can be", "is not None: fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date", "Slice of precomputed table containing row(s) corresponding to the requested", "datetime import datetime import numpy as np import astropy.time import", "which specifies which year(s) of historical daily # weather to", "weather to {0}.'.format(filename)) def get(self, time): \"\"\"Get the weather conditions", "is deterministic and only depends on the config weather #", "pick between three scenarios for partially closed nights: # 1.", "replace=True)) # Lookup the dome closed fractions for each night", "1. closed from dusk, then open the rest of the", "bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i] closed |= (night_mjd >=", "config. Seeing and transparency values are stored with 32-bit floats", "with probability frac / 2. closed |= (night_mjd < bright_dusk[i]", "num_nights self.steps_per_day = steps_per_day self.replay = replay def save(self, filename,", "# Dome closed during first part of the night. #", "config.last_day().year - config.first_day().year + 1 history = ['Y{}'.format(year) for year", "and dusk and dawn, with a closed period during the", "< bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if dome_closed_frac[i] == 0:", "- config.first_day().year + 1 history = ['Y{}'.format(year) for year in", "self.stop_date = stop_date self.num_nights = num_nights self.steps_per_day = steps_per_day self.replay", "`restore` parameter. Parameters ---------- filename : str Name of the", "Dome is always closed before dusk and after dawn. 
closed", "rest of the night. # 2. open at dusk, then", "config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore is not", "< bright_dusk[i] + dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: # Dome", "import desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils", "divides 24 hours. steps_per_day = int(round((1 * u.day / time_step).to(1).value))", "- bright_dusk[i]) dome_closed_at = dome_open_at - dome_closed_time[i] closed |= (night_mjd", "time(s). \"\"\" offset = np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day", "output path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite any existing", "len(self._table)): raise ValueError('Cannot get weather beyond tabulated range.') return self._table[offset]", "file when this is True. \"\"\" config = desisurvey.config.Configuration() filename", "raise ValueError('Expected start_date < stop_date.') # Check that the time", "hours: {0}.' .format(time_step)) # Calculate the number of times where", "night. ilo, ihi = (start_date - ephem.start_date).days, (stop_date - ephem.start_date).days", "u import desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem import", "# Dome closed during the middle of the night. #", "use to generate stochastic conditions. The seed determines the same", "when the simulated weather is requested. Returns ------- table slice", "the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite", "* u.day / time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.):", "to replay during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date,", "The simulated conditions include seeing, transparency and the dome-open fraction.", "steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table =", "when this is True. \"\"\" config = desisurvey.config.Configuration() filename =", "the requested time(s). \"\"\" offset = np.floor( (time.mjd - self._table['mjd'][0])", "chosen randomly, with the chance of the night being closed", "generate stochastic conditions. The seed determines the same seeing and", "closed during first part of the night. # This occurs", "years. years_to_simulate = config.last_day().year - config.first_day().year + 1 history =", "class Weather(object): \"\"\"Simulate weather conditions affecting observations. The start/stop date", "replay during the simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay)", "self._table['mjd'][0]) * self.steps_per_day + 0.5 ).astype(int) if np.any(offset < 0)", ": str Either 'random' or a comma-separated list of years", "steps_per_day, (i + 1) * steps_per_day) night_mjd = self._table['mjd'][sl] #", "value of ``replay``. replay : str Either 'random' or a", "Dome closed during last part of the night. 
# This", "---------- time : astropy.time.Time Time(s) when the simulated weather is", "np.ones(num_rows, bool) for i in range(num_nights): sl = slice(i *", "from datetime import datetime import numpy as np import astropy.time", "A relative path name refers to the :meth:`configuration output path", "comma-separated list of years whose historical weather should be replayed,", "division, absolute_import from datetime import datetime import numpy as np", "will be a boostrap sampling of all available years with", "as u import desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem", "with 32-bit floats to save some memory. Parameters ---------- seed", "if replay == 'random': # Generate a bootstrap sampling of", "during first part of the night. # This occurs with", "+ dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: # Dome closed during", "weather years. years_to_simulate = config.last_day().year - config.first_day().year + 1 history", "containing row(s) corresponding to the requested time(s). \"\"\" offset =", "number of times where we will tabulate the weather. num_rows", "= num_nights self.steps_per_day = steps_per_day self.replay = replay def save(self,", "constructor `restore` parameter. Parameters ---------- filename : str Name of", "* (bright_dawn - bright_dusk) # Randomly pick between three scenarios", "to include margin. \"\"\" def __init__(self, seed=1, replay='random', time_step=5, restore=None,", "== 1: # Dome closed all night. This occurs with", "self.steps_per_day = steps_per_day self.replay = replay def save(self, filename, overwrite=True):", "def __init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step,", "probability frac / 2. closed |= (night_mjd > bright_dawn[i] -", "realization independent of the value of ``replay``. replay : str", "= desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows) / float(steps_per_day)) *", ": float or :class:`astropy.units.Quantity`, optional Time step calculating updates. Must", "conditions. The simulated conditions include seeing, transparency and the dome-open", "set any unspecified dates. start_date = config.first_day() stop_date = config.last_day()", "seed={} replay=\"{}\".' .format(seed, replay)) gen = np.random.RandomState(seed) # Use our", "interpolation. Parameters ---------- time : astropy.time.Time Time(s) when the simulated", "calculating updates. Must evenly divide 24 hours. If unitless float,", "historical weather should be replayed, e.g. 'Y2010,Y2012'. Replayed weather will", "This step is deterministic and only depends on the config", "the fractional time during the night when the dome reopens.", "= bright_dusk[i] + r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at =", "include margin. \"\"\" def __init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0):", "== 'random': # Generate a bootstrap sampling of the historical", "the night. # 3. open and dusk and dawn, with", "stop_date.') # Check that the time step evenly divides 24", "precomputed table containing row(s) corresponding to the requested time(s). \"\"\"", "This occurs with probability frac / 2. closed |= (night_mjd", "and after dawn. closed = (night_mjd < bright_dusk[i]) | (night_mjd", "overwrite=True): \"\"\"Save the generated weather to a file. The saved", "conditions. The seed determines the same seeing and transparency realization", "the value of ``replay``. 
replay : str Either 'random' or", "< stop_date.') # Check that the time step evenly divides", "refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite : bool", "conditions at the closest tabulated time, rather than using interpolation.", "slice Slice of precomputed table containing row(s) corresponding to the", "closed |= (night_mjd >= dome_closed_at) & (night_mjd < dome_open_at) self._table['open'][sl][closed]", ").astype(int) if np.any(offset < 0) or np.any(offset > len(self._table)): raise", "the night being closed equal to extra_random_close_fraction. This is intended", "{0}.' .format(time_step)) # Calculate the number of times where we", "to the requested time(s). \"\"\" offset = np.floor( (time.mjd -", "3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) #", "stochastic observing weather conditions. The simulated conditions include seeing, transparency", "then open the rest of the night. # 2. open", "0.5 * dome_closed_frac[i]: # Dome closed during first part of", "data. Use 'Y2015' for the worst-case weather scenario. time_step :", "close the dome completely on some nights. Nights are chosen", "= desimodel.weather.sample_seeing( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random atmospheric", "- frac. Use the value of r[i] # as the", "< dome_closed_frac[i]: # Dome closed during last part of the", "---------- filename : str Name of the file where the", "['Y{}'.format(year) for year in range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate,", "the seeing # and transparency sampling below. self._table['open'] = np.ones(num_rows,", "below. self._table['open'] = np.ones(num_rows, bool) for i in range(num_nights): sl", "time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError( 'Requested", "r[i] < dome_closed_frac[i]: # Dome closed during last part of", "= slice(i * steps_per_day, (i + 1) * steps_per_day) night_mjd", "(night_mjd >= bright_dawn[i]) if dome_closed_frac[i] == 0: # Dome open", "If unitless float, will be interpreted as minutes. restore :", "output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close the dome", "fractions for each night of the survey. # This step", "- bright_dusk) # Randomly pick between three scenarios for partially", "last part of the night. # This occurs with probability", "= 24 * 3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing( num_rows,", "and transparency values are stored with 32-bit floats to save", "offset = np.floor( (time.mjd - self._table['mjd'][0]) * self.steps_per_day + 0.5", "taken from the survey config. Seeing and transparency values are", "absolute_import from datetime import datetime import numpy as np import", "occurs with probability frac / 2. closed |= (night_mjd >", "sampling of the historical weather years. years_to_simulate = config.last_day().year -", "# Dome closed during last part of the night. #", "(night_mjd < dome_open_at) self._table['open'][sl][closed] = False self.start_date = start_date self.stop_date", "import astropy.time import astropy.table import astropy.units as u import desiutil.log", "'random' or a comma-separated list of years whose historical weather", "floats to save some memory. 
Parameters ---------- seed : int", "u.day / time_step).to(1).value)) if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise", "time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay", "Dome open all night. pass elif dome_closed_frac[i] == 1: #", "the file where the weather should be saved. A relative", "= (start_date - ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi]", "stored with 32-bit floats to save some memory. Parameters ----------", "2. closed[:] = True elif r[i] < 0.5 * dome_closed_frac[i]:", "can be restored using the constructor `restore` parameter. Parameters ----------", "conditions affecting observations. The start/stop date range is taken from", "# This occurs with probability frac / 2. closed |=", "Nights are chosen randomly, with the chance of the night", "conditions include seeing, transparency and the dome-open fraction. \"\"\" from", "# 2. open at dusk, then closed for the rest", "weather scenario. time_step : float or :class:`astropy.units.Quantity`, optional Time step", "* dome_closed_frac[i]: # Dome closed during first part of the", "= ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn - bright_dusk) #", "to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently", "desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore is not None: fullname", "dt_sec = 24 * 3600. / steps_per_day self._table['seeing'] = desimodel.weather.sample_seeing(", "tabulate the weather. num_rows = num_nights * steps_per_day meta =", "a fixed number of random numbers to decouple from the", "closed |= (night_mjd < bright_dusk[i] + dome_closed_time[i]) elif r[i] <", "desiutil.log import desimodel.weather import desisurvey.config import desisurvey.ephem import desisurvey.utils class", "file can be restored using the constructor `restore` parameter. Parameters", "if necessary. Random weather will be a boostrap sampling of", "- ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time =", "get(self, time): \"\"\"Get the weather conditions at the specified time(s).", "self.log.info('Restored weather from {}.'.format(fullname)) return else: self.log.info('Generating random weather with", "a bootstrap sampling of the historical weather years. years_to_simulate =", "+ r[i] * (bright_dawn[i] - bright_dusk[i]) dome_closed_at = dome_open_at -", "---------- seed : int Random number seed to use to", "name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite :", "which year(s) of historical daily # weather to replay during", "the middle of the night. # This occurs with probability", "< dome_open_at) self._table['open'][sl][closed] = False self.start_date = start_date self.stop_date =", "existing file when this is True. \"\"\" config = desisurvey.config.Configuration()", "of the file where the weather should be saved. A", "closed all night. This occurs with probability frac / 2.", "for the worst-case weather scenario. time_step : float or :class:`astropy.units.Quantity`,", "Generate a bootstrap sampling of the historical weather years. 
years_to_simulate", "STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize column", "meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta)", "# Generate a bootstrap sampling of the historical weather years.", "table containing row(s) corresponding to the requested time(s). \"\"\" offset", "= desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random': #", "and dawn, with a closed period during the night. #", "times = t0 + (np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd']", "occurs with probability frac / 2. closed |= (night_mjd <", "= steps_per_day self.replay = replay def save(self, filename, overwrite=True): \"\"\"Save", "self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random':", "updates. Must evenly divide 24 hours. If unitless float, will", "time_step * u.min self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem", "nights. Nights are chosen randomly, with the chance of the", "part of the night. # This occurs with probability frac", "path name refers to the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. overwrite", "1 - frac. Use the value of r[i] # as", "saved file can be restored using the constructor `restore` parameter.", "closed for the rest of the night. # 3. open", "t0 + (np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd'] = times.mjd", "scenarios for partially closed nights: # 1. closed from dusk,", "replay='random', time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step =", "Parameters ---------- time : astropy.time.Time Time(s) when the simulated weather", "num_nights = (stop_date - start_date).days if num_nights <= 0: raise", "self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from {}.'.format(fullname))", "replay : str Either 'random' or a comma-separated list of", "weather conditions affecting observations. The start/stop date range is taken", "of the value of ``replay``. replay : str Either 'random'", "Either 'random' or a comma-separated list of years whose historical", "= dome_closed_frac * (bright_dawn - bright_dusk) # Randomly pick between", "file. The saved file can be restored using the constructor", "(stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time", "num_nights <= 0: raise ValueError('Expected start_date < stop_date.') # Check", "str Name of the file where the weather should be", "desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay =", "when this is provided. A relative path name refers to", "# and transparency sampling below. self._table['open'] = np.ones(num_rows, bool) for", "start_date = config.first_day() stop_date = config.last_day() num_nights = (stop_date -", "time during the night when the dome reopens. dome_open_at =", "# Initialize column of MJD timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times", "float, will be interpreted as minutes. 
restore : filename or", "dome closed fractions for each night of the survey. #", "values are stored with 32-bit floats to save some memory.", "= desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore", "= desisurvey.ephem.get_ephem() if restore is not None: fullname = config.get_path(restore)", ":meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close the", "step calculating updates. Must evenly divide 24 hours. If unitless", "for year in range(2007, 2018)] replay = ','.join(gen.choice(history, years_to_simulate, replace=True))", "self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored weather from", "chance of the night being closed equal to extra_random_close_fraction. This", ": str Name of the file where the weather should", "a random atmospheric transparency time series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows,", "date range is taken from the survey config. Seeing and", "- dome_closed_time[i] closed |= (night_mjd >= dome_closed_at) & (night_mjd <", "bright_dawn[i] - dome_closed_time[i]) else: # Dome closed during the middle", "This is intended to include margin. \"\"\" def __init__(self, seed=1,", "replay=\"{}\".' .format(seed, replay)) gen = np.random.RandomState(seed) # Use our config", "seeing and transparency realization independent of the value of ``replay``.", "<desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close the dome completely on", "night being closed equal to extra_random_close_fraction. This is intended to", "on some nights. Nights are chosen randomly, with the chance", "dome reopens. dome_open_at = bright_dusk[i] + r[i] * (bright_dawn[i] -", "config.first_day() stop_date = config.last_day() num_nights = (stop_date - start_date).days if", "1) * steps_per_day) night_mjd = self._table['mjd'][sl] # Dome is always", "2. closed |= (night_mjd > bright_dawn[i] - dome_closed_time[i]) else: #", "(night_mjd > bright_dawn[i] - dome_closed_time[i]) else: # Dome closed during", "night of the survey. # This step is deterministic and", "restored using the constructor `restore` parameter. Parameters ---------- filename :", "the number of times where we will tabulate the weather.", "i in range(num_nights): sl = slice(i * steps_per_day, (i +", "will be interpreted as minutes. restore : filename or None", "night. # This occurs with probability frac / 2. closed", "time series. dt_sec = 24 * 3600. / steps_per_day self._table['seeing']", "return else: self.log.info('Generating random weather with seed={} replay=\"{}\".' .format(seed, replay))", "during the night when the dome reopens. dome_open_at = bright_dusk[i]", "Use 'Y2015' for the worst-case weather scenario. time_step : float", "are stored with 32-bit floats to save some memory. Parameters", "/ 2. closed[:] = True elif r[i] < 0.5 *", "from the survey config. Seeing and transparency values are stored", "= desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if restore is not None:", "step evenly divides 24 hours. steps_per_day = int(round((1 * u.day", "time to hours per night. ilo, ihi = (start_date -", "open and dusk and dawn, with a closed period during", "/ 2. closed |= (night_mjd > bright_dawn[i] - dome_closed_time[i]) else:", ".format(seed, replay)) gen = np.random.RandomState(seed) # Use our config to", "observing weather conditions. 
The simulated conditions include seeing, transparency and", "__future__ import print_function, division, absolute_import from datetime import datetime import", "for each night of the survey. # This step is", "raise ValueError( 'Requested time_step does not evenly divide 24 hours:", "and transparency realization independent of the value of ``replay``. replay", "dusk and dawn, with a closed period during the night.", "== 0: # Dome open all night. pass elif dome_closed_frac[i]", "# 1. closed from dusk, then open the rest of", "time : astropy.time.Time Time(s) when the simulated weather is requested.", "slice(i * steps_per_day, (i + 1) * steps_per_day) night_mjd =", "/ float(steps_per_day)) * u.day self._table['mjd'] = times.mjd # Generate a", "np import astropy.time import astropy.table import astropy.units as u import", "dt_sec=dt_sec, gen=gen).astype(np.float32) # Generate a random atmospheric transparency time series.", "desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random': # Generate", "seed : int Random number seed to use to generate", "if not np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step", "replay == 'random': # Generate a bootstrap sampling of the", "extra_downtime] = 1. # Convert fractions of scheduled time to", "some nights. Nights are chosen randomly, with the chance of", "/ 2. closed |= (night_mjd < bright_dusk[i] + dome_closed_time[i]) elif", "stochastic conditions. The seed determines the same seeing and transparency", "from __future__ import print_function, division, absolute_import from datetime import datetime", "open at dusk, then closed for the rest of the", "dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random': # Generate a bootstrap", "# Pick scenarios 1+2 with probability equal to the closed", "= config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def get(self,", "night. # This occurs with probability 1 - frac. Use", "dome-open fraction. \"\"\" from __future__ import print_function, division, absolute_import from", "fraction. \"\"\" from __future__ import print_function, division, absolute_import from datetime", "of MJD timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0 +", "r = gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] =", "0.5 ).astype(int) if np.any(offset < 0) or np.any(offset > len(self._table)):", "or :class:`astropy.units.Quantity`, optional Time step calculating updates. Must evenly divide", "affecting observations. The start/stop date range is taken from the", "simulation from the specified file name. All other parameters are", "simulation. dome_closed_frac = desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights)", "the dome completely on some nights. Nights are chosen randomly,", "dusk and after dawn. closed = (night_mjd < bright_dusk[i]) |", "ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi]", "num_nights * steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay)", "specified file name. 
All other parameters are ignored when this", "table slice Slice of precomputed table containing row(s) corresponding to", "r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1. # Convert", "time, rather than using interpolation. Parameters ---------- time : astropy.time.Time", "where the weather should be saved. A relative path name", "with probability frac / 2. closed[:] = True elif r[i]", "elif r[i] < 0.5 * dome_closed_frac[i]: # Dome closed during", "open the rest of the night. # 2. open at", "------- table slice Slice of precomputed table containing row(s) corresponding", "(night_mjd < bright_dusk[i]) | (night_mjd >= bright_dawn[i]) if dome_closed_frac[i] ==", "a closed period during the night. # Pick scenarios 1+2", "night. # Pick scenarios 1+2 with probability equal to the", "else: # Dome closed during the middle of the night.", "observations. The start/stop date range is taken from the survey", "= ','.join(gen.choice(history, years_to_simulate, replace=True)) # Lookup the dome closed fractions", "> len(self._table)): raise ValueError('Cannot get weather beyond tabulated range.') return", "= desimodel.weather.dome_closed_fractions( start_date, stop_date, replay=replay) r = gen.uniform(size=num_nights) r2 =", "# Calculate the number of times where we will tabulate", "= dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) #", "occurs with probability 1 - frac. Use the value of", "name. All other parameters are ignored when this is provided.", "not None: fullname = config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date =", "dusk, then closed for the rest of the night. #", "using interpolation. Parameters ---------- time : astropy.time.Time Time(s) when the", "seeing, transparency and the dome-open fraction. \"\"\" from __future__ import", "self.log = desiutil.log.get_logger() config = desisurvey.config.Configuration() ephem = desisurvey.ephem.get_ephem() if", "steps_per_day = int(round((1 * u.day / time_step).to(1).value)) if not np.allclose((steps_per_day", "first part of the night. # This occurs with probability", "self._table['mjd'] = times.mjd # Generate a random atmospheric seeing time", "(night_mjd < bright_dusk[i] + dome_closed_time[i]) elif r[i] < dome_closed_frac[i]: #", "when the dome reopens. dome_open_at = bright_dusk[i] + r[i] *", "3. open and dusk and dawn, with a closed period", "random numbers to decouple from the seeing # and transparency", "time): \"\"\"Get the weather conditions at the specified time(s). Returns", "\"\"\" config = desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved", "Dome closed during the middle of the night. # This", "weather conditions at the specified time(s). Returns the conditions at", "import desisurvey.config import desisurvey.ephem import desisurvey.utils class Weather(object): \"\"\"Simulate weather", "= replay def save(self, filename, overwrite=True): \"\"\"Save the generated weather", "Replayed weather will be used cyclically if necessary. Random weather", "Pick scenarios 1+2 with probability equal to the closed fraction.", "the closest tabulated time, rather than using interpolation. Parameters ----------", "depends on the config weather # parameter, which specifies which", "bright_dawn[i]) if dome_closed_frac[i] == 0: # Dome open all night.", "should be replayed, e.g. 'Y2010,Y2012'. 
Replayed weather will be used", "stop_date = config.last_day() num_nights = (stop_date - start_date).days if num_nights", "the value of r[i] # as the fractional time during", "filename : str Name of the file where the weather", "the dome closed fractions for each night of the survey.", "np.random.RandomState(seed) # Use our config to set any unspecified dates.", "num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay == 'random': # Generate a", "2. open at dusk, then closed for the rest of", "MJD timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0 + (np.arange(num_rows)", ">= bright_dawn[i]) if dome_closed_frac[i] == 0: # Dome open all", "list of years whose historical weather should be replayed, e.g.", "= 1. # Convert fractions of scheduled time to hours", "file name. All other parameters are ignored when this is", "desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day", "at the specified time(s). Returns the conditions at the closest", "same seeing and transparency realization independent of the value of", "from the specified file name. All other parameters are ignored", "dome_closed_frac[i] == 0: # Dome open all night. pass elif", "weather. num_rows = num_nights * steps_per_day meta = dict(START=str(start_date), STOP=str(stop_date),", "independent of the value of ``replay``. replay : str Either", "three scenarios for partially closed nights: # 1. closed from", "= False self.start_date = start_date self.stop_date = stop_date self.num_nights =", "row(s) corresponding to the requested time(s). \"\"\" offset = np.floor(", "frac / 2. closed |= (night_mjd < bright_dusk[i] + dome_closed_time[i])", "during last part of the night. # This occurs with", "dusk, then open the rest of the night. # 2.", "this is provided. A relative path name refers to the", "the night. # This occurs with probability 1 - frac.", "``replay``. replay : str Either 'random' or a comma-separated list", "the :meth:`configuration output path <desisurvey.config.Configuration.get_path>`. extra_downtime : float Additionally close", "start_date).days if num_nights <= 0: raise ValueError('Expected start_date < stop_date.')", "'Requested time_step does not evenly divide 24 hours: {0}.' .format(time_step))", "sampling below. self._table['open'] = np.ones(num_rows, bool) for i in range(num_nights):", "of the night being closed equal to extra_random_close_fraction. This is", "+ (np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd'] = times.mjd #", "series. self._table['transparency'] = desimodel.weather.sample_transp( num_rows, dt_sec=dt_sec, gen=gen).astype(np.float32) if replay ==", "equal to the closed fraction. # Use a fixed number", "Weather(object): \"\"\"Simulate weather conditions affecting observations. The start/stop date range", "Random weather will be a boostrap sampling of all available", "completely on some nights. Nights are chosen randomly, with the", "times where we will tabulate the weather. num_rows = num_nights", "for i in range(num_nights): sl = slice(i * steps_per_day, (i", "1+2 with probability equal to the closed fraction. # Use", "is True. \"\"\" config = desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename,", "= config.last_day().year - config.first_day().year + 1 history = ['Y{}'.format(year) for", "transparency and the dome-open fraction. 
\"\"\" from __future__ import print_function,", "self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to {0}.'.format(filename)) def get(self, time): \"\"\"Get", "time_step : float or :class:`astropy.units.Quantity`, optional Time step calculating updates.", "bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn - bright_dusk)", "the historical weather years. years_to_simulate = config.last_day().year - config.first_day().year +", "= gen.uniform(size=num_nights) r2 = gen.uniform(size=num_nights) dome_closed_frac[r2 < extra_downtime] = 1.", "Dome closed all night. This occurs with probability frac /", "0) or np.any(offset > len(self._table)): raise ValueError('Cannot get weather beyond", "extra_random_close_fraction. This is intended to include margin. \"\"\" def __init__(self,", "dome_closed_time = dome_closed_frac * (bright_dawn - bright_dusk) # Randomly pick", "the config weather # parameter, which specifies which year(s) of", "dates. start_date = config.first_day() stop_date = config.last_day() num_nights = (stop_date", "dome_closed_at = dome_open_at - dome_closed_time[i] closed |= (night_mjd >= dome_closed_at)", "ilo, ihi = (start_date - ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk", "minutes. restore : filename or None Restore an existing weather", "to save some memory. Parameters ---------- seed : int Random", "u.day self._table['mjd'] = times.mjd # Generate a random atmospheric seeing", "at dusk, then closed for the rest of the night.", "u.Quantity): time_step = time_step * u.min self.log = desiutil.log.get_logger() config", "config.get_path(restore) self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date =", "the weather. num_rows = num_nights * steps_per_day meta = dict(START=str(start_date),", "(np.arange(num_rows) / float(steps_per_day)) * u.day self._table['mjd'] = times.mjd # Generate", "1: # Dome closed all night. This occurs with probability", "all night. This occurs with probability frac / 2. closed[:]", "config weather # parameter, which specifies which year(s) of historical", "def get(self, time): \"\"\"Get the weather conditions at the specified", "(i + 1) * steps_per_day) night_mjd = self._table['mjd'][sl] # Dome", "import desisurvey.ephem import desisurvey.utils class Weather(object): \"\"\"Simulate weather conditions affecting", "be used cyclically if necessary. Random weather will be a", "years with historical weather data. Use 'Y2015' for the worst-case", "dict(START=str(start_date), STOP=str(stop_date), NIGHTS=num_nights, STEPS=steps_per_day, REPLAY=replay) self._table = astropy.table.Table(meta=meta) # Initialize", "path <desisurvey.config.Configuration.get_path>`. overwrite : bool Silently overwrite any existing file", "= astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP'])", "during the night. # Pick scenarios 1+2 with probability equal", "dome_open_at) self._table['open'][sl][closed] = False self.start_date = start_date self.stop_date = stop_date", "weather data. Use 'Y2015' for the worst-case weather scenario. 
time_step", "a boostrap sampling of all available years with historical weather", "astropy.table import astropy.units as u import desiutil.log import desimodel.weather import", "+ 1) * steps_per_day) night_mjd = self._table['mjd'][sl] # Dome is", "= dome_open_at - dome_closed_time[i] closed |= (night_mjd >= dome_closed_at) &", "self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day =", "= desisurvey.config.Configuration() filename = config.get_path(filename) self._table.write(filename, overwrite=overwrite) self.log.info('Saved weather to", "r[i] # as the fractional time during the night when", "= True elif r[i] < 0.5 * dome_closed_frac[i]: # Dome", "be a boostrap sampling of all available years with historical", "np.allclose((steps_per_day * time_step).to(u.day).value, 1.): raise ValueError( 'Requested time_step does not", "occurs with probability frac / 2. closed[:] = True elif", "dawn. closed = (night_mjd < bright_dusk[i]) | (night_mjd >= bright_dawn[i])", "column of MJD timestamps. t0 = desisurvey.utils.local_noon_on_date(start_date) times = t0", "sl = slice(i * steps_per_day, (i + 1) * steps_per_day)", "self.num_nights = self._table.meta['NIGHTS'] self.steps_per_day = self._table.meta['STEPS'] self.replay = self._table.meta['REPLAY'] self.log.info('Restored", "# Dome open all night. pass elif dome_closed_frac[i] == 1:", "will be used cyclically if necessary. Random weather will be", "the survey config. Seeing and transparency values are stored with", "specifies which year(s) of historical daily # weather to replay", "the night. # 2. open at dusk, then closed for", "filename, overwrite=True): \"\"\"Save the generated weather to a file. The", "save(self, filename, overwrite=True): \"\"\"Save the generated weather to a file.", "from the seeing # and transparency sampling below. self._table['open'] =", "closed period during the night. # Pick scenarios 1+2 with", "= ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac * (bright_dawn", "The start/stop date range is taken from the survey config.", "evenly divide 24 hours. If unitless float, will be interpreted", "period during the night. # Pick scenarios 1+2 with probability", "filename or None Restore an existing weather simulation from the", "Name of the file where the weather should be saved.", "with historical weather data. Use 'Y2015' for the worst-case weather", "is taken from the survey config. Seeing and transparency values", "the survey. # This step is deterministic and only depends", "- start_date).days if num_nights <= 0: raise ValueError('Expected start_date <", "optional Time step calculating updates. Must evenly divide 24 hours.", "seed to use to generate stochastic conditions. The seed determines", "import desisurvey.utils class Weather(object): \"\"\"Simulate weather conditions affecting observations. The", "\"\"\" def __init__(self, seed=1, replay='random', time_step=5, restore=None, extra_downtime=0): if not", "config.last_day() num_nights = (stop_date - start_date).days if num_nights <= 0:", "time_step=5, restore=None, extra_downtime=0): if not isinstance(time_step, u.Quantity): time_step = time_step", "and transparency sampling below. self._table['open'] = np.ones(num_rows, bool) for i", "any existing file when this is True. 
\"\"\" config =", "astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date( self._table.meta['STOP']) self.num_nights", "overwrite any existing file when this is True. \"\"\" config", "self._table = astropy.table.Table.read(fullname) self.start_date = desisurvey.utils.get_date( self._table.meta['START']) self.stop_date = desisurvey.utils.get_date(", "requested time(s). \"\"\" offset = np.floor( (time.mjd - self._table['mjd'][0]) *", "self.replay = replay def save(self, filename, overwrite=True): \"\"\"Save the generated", "astropy.time.Time Time(s) when the simulated weather is requested. Returns -------", "ihi = (start_date - ephem.start_date).days, (stop_date - ephem.start_date).days bright_dusk =", "closest tabulated time, rather than using interpolation. Parameters ---------- time", "bright_dusk = ephem._table['brightdusk'].data[ilo:ihi] bright_dawn = ephem._table['brightdawn'].data[ilo:ihi] dome_closed_time = dome_closed_frac *", "replayed, e.g. 'Y2010,Y2012'. Replayed weather will be used cyclically if" ]
# coding=utf-8
#
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Utility to convert a Data Export API response into TSV.

This provides utilities to both print TSV files to the standard output
as well as directly to a file. This logic handles all the utf-8 conversion.

  GetTsvFilePrinter: Returns an instantiated object to output to files.
  GetTsvScreenPrinter: Returns an instantiated object to output to the screen.
  UnicodeWriter(): Utf-8 encodes output.
  ExportPrinter(): Converts the Data Export API response into tabular data.
"""

__author__ = 'api.nickm@ (<NAME>)'

import codecs
import csv
import StringIO
import sys
import types


# A list of special characters that need to be escaped.
SPECIAL_CHARS = ('+', '-', '/', '*', '=')
# TODO(nm): Test leading numbers.


def GetTsvFilePrinter(file_name):
  """Returns an ExportPrinter object to output to file_name.

  Args:
    file_name: string The name of the file to output to.

  Returns:
    The newly created ExportPrinter object.
  """
  my_handle = open(file_name, 'w')  # Open for writing, not reading.
  writer = UnicodeWriter(my_handle, dialect='excel-tab')
  return ExportPrinter(writer)


def GetTsvScreenPrinter():
  """Returns an ExportPrinter object to output to sys.stdout."""
  writer = UnicodeWriter(sys.stdout, dialect='excel-tab')
  return ExportPrinter(writer)


def GetTsvStringPrinter(f):
  """Returns an ExportPrinter object to output to the file object f."""
  writer = UnicodeWriter(f, dialect='excel-tab')
  return ExportPrinter(writer)


# Wrapper to output to utf-8. Taken mostly / directly from Python docs:
# http://docs.python.org/library/csv.html
class UnicodeWriter(object):
  """A CSV writer which uses the csv module to output csv compatible formats.

  Will write rows to CSV file "f", which is encoded in the given encoding.
  """

  def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds):
    # Redirect output to a queue
    self.queue = StringIO.StringIO()
    self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
    self.stream = f
    self.encoder = codecs.getincrementalencoder(encoding)()

  # pylint: disable=g-bad-name
  def writerow(self, row):
    """Writes a CSV row.

    Args:
      row: list The row to write to the CSV output.
    """
    self.writer.writerow([s.encode('utf-8') for s in row])
    # Fetch UTF-8 output from the queue ...
    data = self.queue.getvalue()
    data = data.decode('utf-8')
    # ... and reencode it into the target encoding
    data = self.encoder.encode(data)
    # write to the target stream
    self.stream.write(data)
    # empty queue
    self.queue.truncate(0)

  # pylint: disable=g-bad-name
  def writerows(self, rows):
    """Writes rows for CSV output.

    Args:
      rows: list of rows to write.
    """
    for row in rows:
      self.writerow(row)


class ExportPrinter(object):
  """Utility class to output the data feed as tabular data."""

  def __init__(self, writer):
    """Initializes the class.

    Args:
      writer: Typically an instance of UnicodeWriter. The interface for this
          object provides two methods, writerow and writerows, which accept
          a list or a list of lists respectively and process them as needed.
    """
    self.writer = writer

  def Output(self, results):
    """Outputs formatted rows of data retrieved from the Data Export API.

    This uses the writer object to output the data returned by the Data
    Export API.

    Args:
      results: The response from the data export API.
    """
    if not results.get('rows'):
      self.writer.writerow(['No Results found'])
    else:
      self.OutputProfileName(results)
      self.writer.writerow([])
      self.OutputContainsSampledData(results)
      self.writer.writerow([])
      self.OutputQueryInfo(results)
      self.writer.writerow([])
      self.OutputHeaders(results)
      self.OutputRows(results)
      self.writer.writerow([])
      self.OutputRowCounts(results)
      self.OutputTotalsForAllResults(results)

  def OutputProfileName(self, results):
    """Outputs the profile name along with the query."""
    profile_name = ''
    info = results.get('profileInfo')
    if info:
      profile_name = info.get('profileName')
    self.writer.writerow(['Report For View (Profile): ', profile_name])

  def OutputQueryInfo(self, results):
    """Outputs the query used."""
    self.writer.writerow(['These query parameters were used:'])

    query = results.get('query')
    for key, value in query.iteritems():
      if type(value) == types.ListType:
        value = ','.join(value)
      else:
        value = str(value)
      value = ExcelEscape(value)
      self.writer.writerow([key, value])

  def OutputContainsSampledData(self, results):
    """Outputs whether the results have been sampled."""
    sampled_text = 'do not'
    if results.get('containsSampledData'):
      sampled_text = 'do'
    row_text = 'These results %s contain sampled data.' % sampled_text
    self.writer.writerow([row_text])

  def OutputHeaders(self, results):
    """Outputs all the dimension and metric names in order."""
    row = []
    for header in results.get('columnHeaders'):
      row.append(header.get('name'))
    self.writer.writerow(row)

  def OutputRows(self, results):
    """Outputs all the rows in the table."""
    # Escape any leading special characters (e.g. =) so that Excel
    # treats them as text rather than formulas.
    for row in results.get('rows'):
      out_row = []
      for cell in row:
        cell = ExcelEscape(cell)
        out_row.append(cell)
      self.writer.writerow(out_row)

  def OutputRowCounts(self, results):
    """Outputs how many rows were returned vs rows that were matched."""
    items = str(results.get('itemsPerPage'))
    matched = str(results.get('totalResults'))
    output = [
        ['Rows Returned', items],
        ['Rows Matched', matched]
    ]
    self.writer.writerows(output)

  def OutputTotalsForAllResults(self, results):
    """Outputs the totals for all results matched by the query.

    This is not the sum of the values returned in the response.
    This will align the metric totals in the same columns as the headers
    are printed. The totals are stored as a dict, where the key is the
    metric name and the value is the total. To align these totals in the
    proper columns, a position index of the metric name and its position
    in the table is first created. Then the totals are added by position
    to a row of empty strings.

    Args:
      results: API Response from Core Reporting API.
    """
    # Create the metric position index.
    metric_index = {}
    headers = results.get('columnHeaders')
    for index in range(0, len(headers)):
      header = headers[index]
      if header.get('columnType') == 'METRIC':
        metric_index[header.get('name')] = index

    # Create a row of empty strings the same length as the header.
    row = [''] * len(headers)

    # Use the position index to output the totals in the right columns.
    totals = results.get('totalsForAllResults')
    for metric_name, metric_total in totals.iteritems():
      index = metric_index[metric_name]
      row[index] = metric_total

    self.writer.writerows([['Totals For All Rows Matched'], row])


def ExcelEscape(input_value):
  """Escapes the first character of a string if it is special in Excel.

  Args:
    input_value: string The value to escape.

  Returns:
    A string that has the first character escaped if it is special.
  """
  if input_value and input_value[0] in SPECIAL_CHARS:
    return "'" + input_value

  return input_value
\"\"\"Utility to convert", "the query used.\"\"\" self.writer.writerow(['These query parameters were used:']) query =", "items = str(results.get('itemsPerPage')) matched = str(results.get('totalResults')) output = [ ['Rows", "totals = results.get('totalsForAllResults') for metric_name, metric_total in totals.iteritems(): index =", "Returns: A string that has the first character escaped if", "http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed", "sampled_text self.writer.writerow([row_text]) def OutputHeaders(self, results): \"\"\"Outputs all the dimension and", "or a list of lists respectively and process them as", "writerow(self, row): \"\"\"Writes a CSV row. Args: row: list The", "def writerows(self, rows): \"\"\"Writes rows for CSV output. Args: rows:", "Utf-8 encodes output. ExportPrinter(): Converts the Data Export API response", "Export API. Args: results: The response from the data export", "out_row = [] for cell in row: cell = ExcelEscape(cell)", "the totals for all results matched by the query. This", "to write to the CSV output. \"\"\" self.writer.writerow([s.encode('utf-8') for s", "in the response. This will align the metric totals in", "align the metric totals in the same columns as the", "write to the CSV output. \"\"\" self.writer.writerow([s.encode('utf-8') for s in", "to output to. Returns: The newly created ExportPrinter object. \"\"\"", "for key, value in query.iteritems(): if type(value) == types.ListType: value", "is special. \"\"\" if input_value and input_value[0] in SPECIAL_CHARS: return", "if header.get('columnType') == 'METRIC': metric_index[header.get('name')] = index # Create a", "self.queue = StringIO.StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream =", "or implied. # See the License for the specific language", "rows that were matched.\"\"\" items = str(results.get('itemsPerPage')) matched = str(results.get('totalResults'))", "the table is first created. Then the totals are added", "row in results.get('rows'): out_row = [] for cell in row:", "a ExportPrinter object to output to file_name. Args: file_name: string", "A string that has the first character escaped if it", "KIND, either express or implied. # See the License for", "specific language governing permissions and # limitations under the License.", "results): \"\"\"Outputs formatted rows of data retrieved from the Data", "return ExportPrinter(writer) # Wrapper to output to utf-8. Taken mostly", "Wrapper to output to utf-8. Taken mostly / directly from", "Args: row: list The row to write to the CSV", "profile_name]) def OutputQueryInfo(self, results): \"\"\"Outputs the query used.\"\"\" self.writer.writerow(['These query", "list of special characters that need to be escaped. 
SPECIAL_CHARS", "UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def GetTsvStringPrinter(f): \"\"\"Returns a ExportPrinter object", "self.writer.writerows([['Totals For All Rows Matched'], row]) def ExcelEscape(input_value): \"\"\"Escapes the", "provides two methods, writerow and writerow, which accepts a list", "self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def OutputProfileName(self, results): \"\"\"Outputs", "writer = UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def GetTsvStringPrinter(f): \"\"\"Returns a", "table.\"\"\" # Replace any first characters that have an =", "it's position in the table is first created. Then the", "for index in range(0, len(headers)): header = headers[index] if header.get('columnType')", "metric_total in totals.iteritems(): index = metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "Args: writer: Typically an instance of UnicodeWriter. The interface for", "'api.nickm@ (<NAME>)' import codecs import csv import StringIO import sys", "value = ','.join(value) else: value = str(value) value = ExcelEscape(value)", "std.stdout.\"\"\" writer = UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def GetTsvStringPrinter(f): \"\"\"Returns", "output to. Returns: The newly created ExportPrinter object. \"\"\" my_handle", "and the value is the total. To align these totals", "to output to utf-8. Taken mostly / directly from Python", "the CSV output. \"\"\" self.writer.writerow([s.encode('utf-8') for s in row]) #", "= ','.join(value) else: value = str(value) value = ExcelEscape(value) self.writer.writerow([key,", "matched = str(results.get('totalResults')) output = [ ['Rows Returned', items], ['Rows", "writer which uses the csv module to output csv compatible", "target encoding data = self.encoder.encode(data) # write to the target", "results): \"\"\"Outputs the totals for all results matched by the", "(the \"License\"); # you may not use this file except", "output to the screen. UnicodeWriter(): Utf-8 encodes output. ExportPrinter(): Converts", "names in order.\"\"\" row = [] for header in results.get('columnHeaders'):", "str(results.get('itemsPerPage')) matched = str(results.get('totalResults')) output = [ ['Rows Returned', items],", "the metric position index. metric_index = {} headers = results.get('columnHeaders')", "# you may not use this file except in compliance", "ExportPrinter(object): \"\"\"Utility class to output a the data feed as", "string The value to escape. Returns: A string that has", "a list or a list of lists respectively and process", "\"\"\" if input_value and input_value[0] in SPECIAL_CHARS: return \"'\" +", "import codecs import csv import StringIO import sys import types", "= self.queue.getvalue() data = data.decode('utf-8') # ... and reencode it", "index in range(0, len(headers)): header = headers[index] if header.get('columnType') ==", "encoding. \"\"\" def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds): # Redirect", "for s in row]) # Fetch UTF-8 output from the", "info.get('profileName') self.writer.writerow(['Report For View (Profile): ', profile_name]) def OutputQueryInfo(self, results):", "the metric totals in the same columns as the headers", "ExportPrinter object. 
\"\"\" my_handle = open(file_name) writer = UnicodeWriter(my_handle, dialect='excel-tab')", "[ ['Rows Returned', items], ['Rows Matched', matched] ] self.writer.writerows(output) def", "matched] ] self.writer.writerows(output) def OutputTotalsForAllResults(self, results): \"\"\"Outputs the totals for", "types # A list of special characters that need to", "self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results)", "in the same columns as the headers are printed. The", "encodes output. ExportPrinter(): Converts the Data Export API response into", "are printed. The totals are stored as a dict, where", "[] for cell in row: cell = ExcelEscape(cell) out_row.append(cell) self.writer.writerow(out_row)", "= 'These results %s contain sampled data.' % sampled_text self.writer.writerow([row_text])", "def GetTsvFilePrinter(file_name): \"\"\"Returns a ExportPrinter object to output to file_name.", "To align these totals in the proper columns, a position", "'do not' if results.get('containsSampledData'): sampled_text = 'do' row_text = 'These", "UnicodeWriter(): Utf-8 encodes output. ExportPrinter(): Converts the Data Export API", "\"\"\" def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds): # Redirect output", "API. \"\"\" if not results.get('rows'): self.writer.writerow('No Results found') else: self.OutputProfileName(results)", "# # Unless required by applicable law or agreed to", "of the values returned in the response. This will align", "\"\"\"Outputs all the rows in the table.\"\"\" # Replace any", "self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def", "object. \"\"\" my_handle = open(file_name) writer = UnicodeWriter(my_handle, dialect='excel-tab') return", "= data.decode('utf-8') # ... and reencode it into the target", "__init__(self, writer): \"\"\"Initializes the class. Args: writer: Typically an instance", "self.writer.writerow([key, value]) def OutputContainsSampledData(self, results): \"\"\"Outputs whether the resuls have", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "results.get('columnHeaders'): row.append(header.get('name')) self.writer.writerow(row) def OutputRows(self, results): \"\"\"Outputs all the rows", "a string if it is special in Excel. Args: input_value:", "CSV writer which uses the csv module to output csv", "Fetch UTF-8 output from the queue ... data = self.queue.getvalue()", "data.' % sampled_text self.writer.writerow([row_text]) def OutputHeaders(self, results): \"\"\"Outputs all the", "Version 2.0 (the \"License\"); # you may not use this", "results.get('query') for key, value in query.iteritems(): if type(value) == types.ListType:", "parameters were used:']) query = results.get('query') for key, value in", "self.writer.writerow([s.encode('utf-8') for s in row]) # Fetch UTF-8 output from", "index to output the totals in the right columns. totals", "under the License. \"\"\"Utility to convert a Data Export API", "respectively and process them as needed. 
\"\"\" self.writer = writer", "writer def Output(self, results): \"\"\"Outputs formatted rows of data retrieved", "metric name and the value is the total. To align", "a queue self.queue = StringIO.StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds)", "self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f self.encoder =", "metric_index[header.get('name')] = index # Create a row of empty strings", "the csv module to output csv compatible formats. Will write", "implied. # See the License for the specific language governing", "metric_name, metric_total in totals.iteritems(): index = metric_index[metric_name] row[index] = metric_total", "row to write to the CSV output. \"\"\" self.writer.writerow([s.encode('utf-8') for", "under the Apache License, Version 2.0 (the \"License\"); # you", "self.writer.writerow(['These query parameters were used:']) query = results.get('query') for key,", "pylint: disable=g-bad-name def writerow(self, row): \"\"\"Writes a CSV row. Args:", "self.queue.truncate(0) # pylint: disable=g-bad-name def writerows(self, rows): \"\"\"Writes rows for", "f self.encoder = codecs.getincrementalencoder(encoding)() # pylint: disable=g-bad-name def writerow(self, row):", "the query. This is not the sum of the values", "query = results.get('query') for key, value in query.iteritems(): if type(value)", "\"\"\"Returns a ExportPrinter object to output to std.stdout.\"\"\" writer =", "results.get('rows'): out_row = [] for cell in row: cell =", "output. \"\"\" self.writer.writerow([s.encode('utf-8') for s in row]) # Fetch UTF-8", "for row in results.get('rows'): out_row = [] for cell in", "UnicodeWriter. The interface for this object provides two methods, writerow", "formats. Will write rows to CSV file \"f\", which is", "row_text = 'These results %s contain sampled data.' % sampled_text", "name of the file to output to. Returns: The newly", "row = [''] * len(headers) # Use the position index", "All rights reserved. # # Licensed under the Apache License,", "by applicable law or agreed to in writing, software #", "response into tabular data. \"\"\" __author__ = 'api.nickm@ (<NAME>)' import", "self.encoder = codecs.getincrementalencoder(encoding)() # pylint: disable=g-bad-name def writerow(self, row): \"\"\"Writes", "in range(0, len(headers)): header = headers[index] if header.get('columnType') == 'METRIC':", "Export API response into tabular data. \"\"\" __author__ = 'api.nickm@", "which is encoded in the given encoding. \"\"\" def __init__(self,", "Reporting API. \"\"\" # Create the metric position index. metric_index", "list of rows to write. \"\"\" for row in rows:", "output to std.stdout.\"\"\" writer = UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def", "data.\"\"\" def __init__(self, writer): \"\"\"Initializes the class. Args: writer: Typically", "the resuls have been sampled.\"\"\" sampled_text = 'do not' if", "are added by position to a row of empty strings.", "csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f self.encoder = codecs.getincrementalencoder(encoding)() #", "\"\"\"Utility to convert a Data Export API reponse into TSV.", "GetTsvFilePrinter(file_name): \"\"\"Returns a ExportPrinter object to output to file_name. Args:", "Converts the Data Export API response into tabular data. \"\"\"", "results): \"\"\"Outputs how many rows were returned vs rows that", "these totals in the proper columns, a position index of", "and writerow, which accepts a list or a list of", "as the header. 
row = [''] * len(headers) # Use", "metric name and it's position in the table is first", "the response. This will align the metric totals in the", "The newly created ExportPrinter object. \"\"\" my_handle = open(file_name) writer", "\"\"\"Writes a CSV row. Args: row: list The row to", "== types.ListType: value = ','.join(value) else: value = str(value) value", "results): \"\"\"Outputs the query used.\"\"\" self.writer.writerow(['These query parameters were used:'])", "name and the value is the total. To align these", "{} headers = results.get('columnHeaders') for index in range(0, len(headers)): header", "the file to output to. Returns: The newly created ExportPrinter", "to escape. Returns: A string that has the first character", "# Create a row of empty strings the same length", "to output to files. GetTsvScreenPrinter: Returns an instantiated object to", "= results.get('profileInfo') if info: profile_name = info.get('profileName') self.writer.writerow(['Report For View", "cell = ExcelEscape(cell) out_row.append(cell) self.writer.writerow(out_row) def OutputRowCounts(self, results): \"\"\"Outputs how", "encoded in the given encoding. \"\"\" def __init__(self, f, dialect=csv.excel,", "special characters that need to be escaped. SPECIAL_CHARS = ('+',", "row]) def ExcelEscape(input_value): \"\"\"Escapes the first character of a string", "from the data export API. \"\"\" if not results.get('rows'): self.writer.writerow('No", "print TSV files to the standard output as well as", "row): \"\"\"Writes a CSV row. Args: row: list The row", "not results.get('rows'): self.writer.writerow('No Results found') else: self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([])", "row[index] = metric_total self.writer.writerows([['Totals For All Rows Matched'], row]) def", "row: cell = ExcelEscape(cell) out_row.append(cell) self.writer.writerow(out_row) def OutputRowCounts(self, results): \"\"\"Outputs", "Core Reporting API. \"\"\" # Create the metric position index.", "instance of UnicodeWriter. The interface for this object provides two", "standard output as well as directly to a file. This", "self.writer.writerow('No Results found') else: self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([])", "ExportPrinter object to output to file_name. Args: file_name: string The", "to utf-8. Taken mostly / directly from Python docs: #", "row. Args: row: list The row to write to the", "TSV files to the standard output as well as directly", "Returned', items], ['Rows Matched', matched] ] self.writer.writerows(output) def OutputTotalsForAllResults(self, results):", "of special characters that need to be escaped. SPECIAL_CHARS =", "header. row = [''] * len(headers) # Use the position", "Args: results: The response from the data export API. \"\"\"", "an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF", "matched.\"\"\" items = str(results.get('itemsPerPage')) matched = str(results.get('totalResults')) output = [", "well as directly to a file. This logic handles all", "will align the metric totals in the same columns as", "Unless required by applicable law or agreed to in writing,", "the class. Args: writer: Typically an instance of UnicodeWriter. The", "columns as the headers are printed. The totals are stored", "the utf-8 conversion. GetTsvFilePrinter: Returns an instantiated object to output", "as well as directly to a file. 
This logic handles", "Create the metric position index. metric_index = {} headers =", "to the screen. UnicodeWriter(): Utf-8 encodes output. ExportPrinter(): Converts the", "Copyright 2015 Google Inc. All rights reserved. # # Licensed", "the rows in the table.\"\"\" # Replace any first characters", "a row of empty strings the same length as the", "object to output the data in the Data Export API.", "= UnicodeWriter(sys.stdout, dialect='excel-tab') return ExportPrinter(writer) def GetTsvStringPrinter(f): \"\"\"Returns a ExportPrinter", "the specific language governing permissions and # limitations under the", "is the total. To align these totals in the proper", "= self.encoder.encode(data) # write to the target stream self.stream.write(data) #", "write to the target stream self.stream.write(data) # empty queue self.queue.truncate(0)", "__author__ = 'api.nickm@ (<NAME>)' import codecs import csv import StringIO", "\"\"\" self.writer.writerow([s.encode('utf-8') for s in row]) # Fetch UTF-8 output", "an instantiated object to output to the screen. UnicodeWriter(): Utf-8", "row of empty strings the same length as the header.", "the table.\"\"\" # Replace any first characters that have an", "# http://docs.python.org/library/csv.html class UnicodeWriter(object): \"\"\"A CSV writer which uses the", "applicable law or agreed to in writing, software # distributed", "# Copyright 2015 Google Inc. All rights reserved. # #", "metric_total self.writer.writerows([['Totals For All Rows Matched'], row]) def ExcelEscape(input_value): \"\"\"Escapes", "Inc. All rights reserved. # # Licensed under the Apache", "csv import StringIO import sys import types # A list", "# Use the position index to output the totals in", "encoding data = self.encoder.encode(data) # write to the target stream", "# coding=utf-8 # Copyright 2015 Google Inc. All rights reserved.", "# empty queue self.queue.truncate(0) # pylint: disable=g-bad-name def writerows(self, rows):", "This provides utitlites to both print TSV files to the", "it into the target encoding data = self.encoder.encode(data) # write", "of the file to output to. Returns: The newly created", "printed. The totals are stored as a dict, where the", "= headers[index] if header.get('columnType') == 'METRIC': metric_index[header.get('name')] = index #", "output to file_name. Args: file_name: string The name of the", "character escaped if it is special. \"\"\" if input_value and", "in writing, software # distributed under the License is distributed", "output = [ ['Rows Returned', items], ['Rows Matched', matched] ]", "query. This is not the sum of the values returned", "along with the qurey.\"\"\" profile_name = '' info = results.get('profileInfo')", "be escaped. SPECIAL_CHARS = ('+', '-', '/', '*', '=') #", "instantiated object to output to files. GetTsvScreenPrinter: Returns an instantiated", "The row to write to the CSV output. \"\"\" self.writer.writerow([s.encode('utf-8')", "into TSV. This provides utitlites to both print TSV files", "= 'api.nickm@ (<NAME>)' import codecs import csv import StringIO import", "CSV output. Args: rows: list of rows to write. \"\"\"", "= 'do' row_text = 'These results %s contain sampled data.'", "OutputTotalsForAllResults(self, results): \"\"\"Outputs the totals for all results matched by", "rows for CSV output. Args: rows: list of rows to", "CSV file \"f\", which is encoded in the given encoding.", "rows: list of rows to write. 
\"\"\" for row in", "all the rows in the table.\"\"\" # Replace any first", "is not the sum of the values returned in the", "metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals For All Rows Matched'], row])", "Taken mostly / directly from Python docs: # http://docs.python.org/library/csv.html class", "GetTsvScreenPrinter(): \"\"\"Returns a ExportPrinter object to output to std.stdout.\"\"\" writer", "for row in rows: self.writerow(row) class ExportPrinter(object): \"\"\"Utility class to", "name along with the qurey.\"\"\" profile_name = '' info =", "dialect='excel-tab') return ExportPrinter(writer) # Wrapper to output to utf-8. Taken", "# write to the target stream self.stream.write(data) # empty queue", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "data = self.queue.getvalue() data = data.decode('utf-8') # ... and reencode", "process them as needed. \"\"\" self.writer = writer def Output(self,", "License, Version 2.0 (the \"License\"); # you may not use", "\"\"\"Initializes the class. Args: writer: Typically an instance of UnicodeWriter.", "# You may obtain a copy of the License at", "writerow, which accepts a list or a list of lists", "to a row of empty strings. Args: results: API Response", "s in row]) # Fetch UTF-8 output from the queue", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "all the utf-8 conversion. GetTsvFilePrinter: Returns an instantiated object to", "= [] for header in results.get('columnHeaders'): row.append(header.get('name')) self.writer.writerow(row) def OutputRows(self,", "type(value) == types.ListType: value = ','.join(value) else: value = str(value)", "the key is the metric name and the value is", "['Rows Returned', items], ['Rows Matched', matched] ] self.writer.writerows(output) def OutputTotalsForAllResults(self,", "object to output to std.stdout.\"\"\" writer = UnicodeWriter(f, dialect='excel-tab') return", "def GetTsvStringPrinter(f): \"\"\"Returns a ExportPrinter object to output to std.stdout.\"\"\"", "data export API. \"\"\" if not results.get('rows'): self.writer.writerow('No Results found')", "is the metric name and the value is the total.", "# pylint: disable=g-bad-name def writerows(self, rows): \"\"\"Writes rows for CSV", "\"\"\" # Create the metric position index. metric_index = {}", "module to output csv compatible formats. Will write rows to", "with the qurey.\"\"\" profile_name = '' info = results.get('profileInfo') if", "... data = self.queue.getvalue() data = data.decode('utf-8') # ... and", "self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results)", "totals.iteritems(): index = metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals For All", "object to output to files. GetTsvScreenPrinter: Returns an instantiated object", "header = headers[index] if header.get('columnType') == 'METRIC': metric_index[header.get('name')] = index", "= ExcelEscape(value) self.writer.writerow([key, value]) def OutputContainsSampledData(self, results): \"\"\"Outputs whether the", "the License for the specific language governing permissions and #", "\"f\", which is encoded in the given encoding. \"\"\" def", "The value to escape. Returns: A string that has the", "same length as the header. 
row = [''] * len(headers)", "Apache License, Version 2.0 (the \"License\"); # you may not", "= '' info = results.get('profileInfo') if info: profile_name = info.get('profileName')", "self.writer.writerow(row) def OutputRows(self, results): \"\"\"Outputs all the rows in the", "query parameters were used:']) query = results.get('query') for key, value", "data feed as tabular data.\"\"\" def __init__(self, writer): \"\"\"Initializes the", "either express or implied. # See the License for the", "ExportPrinter(writer) def GetTsvScreenPrinter(): \"\"\"Returns a ExportPrinter object to output to", "writer object to output the data in the Data Export", "file_name. Args: file_name: string The name of the file to", "# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or", "\"\"\"Outputs the totals for all results matched by the query.", "to output to std.stdout.\"\"\" writer = UnicodeWriter(f, dialect='excel-tab') return ExportPrinter(writer)", "align these totals in the proper columns, a position index", "index # Create a row of empty strings the same", "by the query. This is not the sum of the", "in rows: self.writerow(row) class ExportPrinter(object): \"\"\"Utility class to output a", "\"\"\" if not results.get('rows'): self.writer.writerow('No Results found') else: self.OutputProfileName(results) self.writer.writerow([])", "an = with '= for row in results.get('rows'): out_row =", "directly from Python docs: # http://docs.python.org/library/csv.html class UnicodeWriter(object): \"\"\"A CSV", "uses the csv module to output csv compatible formats. Will", "in results.get('columnHeaders'): row.append(header.get('name')) self.writer.writerow(row) def OutputRows(self, results): \"\"\"Outputs all the", "profile_name = info.get('profileName') self.writer.writerow(['Report For View (Profile): ', profile_name]) def", "to output csv compatible formats. Will write rows to CSV", "str(value) value = ExcelEscape(value) self.writer.writerow([key, value]) def OutputContainsSampledData(self, results): \"\"\"Outputs", "mostly / directly from Python docs: # http://docs.python.org/library/csv.html class UnicodeWriter(object):", "a Data Export API reponse into TSV. This provides utitlites", "data = data.decode('utf-8') # ... and reencode it into the", "import types # A list of special characters that need", "a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 #", "governing permissions and # limitations under the License. \"\"\"Utility to", "them as needed. \"\"\" self.writer = writer def Output(self, results):", "%s contain sampled data.' % sampled_text self.writer.writerow([row_text]) def OutputHeaders(self, results):", "my_handle = open(file_name) writer = UnicodeWriter(my_handle, dialect='excel-tab') return ExportPrinter(writer) def", "key, value in query.iteritems(): if type(value) == types.ListType: value =", "', profile_name]) def OutputQueryInfo(self, results): \"\"\"Outputs the query used.\"\"\" self.writer.writerow(['These", "a row of empty strings. Args: results: API Response from", "','.join(value) else: value = str(value) value = ExcelEscape(value) self.writer.writerow([key, value])", "http://docs.python.org/library/csv.html class UnicodeWriter(object): \"\"\"A CSV writer which uses the csv", "\"\"\"Outputs formatted rows of data retrieved from the Data Export", "metric position index. 
metric_index = {} headers = results.get('columnHeaders') for", "in totals.iteritems(): index = metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals For", "= {} headers = results.get('columnHeaders') for index in range(0, len(headers)):", "self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def OutputProfileName(self, results): \"\"\"Outputs the", "strings. Args: results: API Response from Core Reporting API. \"\"\"", "index = metric_index[metric_name] row[index] = metric_total self.writer.writerows([['Totals For All Rows", "**kwds): # Redirect output to a queue self.queue = StringIO.StringIO()", "output the data in the Data Export API. Args: results:", "as a dict, where the key is the metric name", "writer: Typically an instance of UnicodeWriter. The interface for this", "the data in the Data Export API. Args: results: The", "# Fetch UTF-8 output from the queue ... data =", "which uses the csv module to output csv compatible formats.", "= UnicodeWriter(my_handle, dialect='excel-tab') return ExportPrinter(writer) def GetTsvScreenPrinter(): \"\"\"Returns a ExportPrinter", "results: The response from the data export API. \"\"\" if", "of the metric name and it's position in the table", "found') else: self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results)", "Matched'], row]) def ExcelEscape(input_value): \"\"\"Escapes the first character of a", "file to output to. Returns: The newly created ExportPrinter object.", "All Rows Matched'], row]) def ExcelEscape(input_value): \"\"\"Escapes the first character", "a the data feed as tabular data.\"\"\" def __init__(self, writer):", "= StringIO.StringIO() self.writer = csv.writer(self.queue, dialect=dialect, **kwds) self.stream = f", "API. This uses the writer object to output the data", "\"License\"); # you may not use this file except in", "rows): \"\"\"Writes rows for CSV output. Args: rows: list of", "stored as a dict, where the key is the metric", "= f self.encoder = codecs.getincrementalencoder(encoding)() # pylint: disable=g-bad-name def writerow(self,", "as tabular data.\"\"\" def __init__(self, writer): \"\"\"Initializes the class. Args:", "self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def OutputProfileName(self, results): \"\"\"Outputs the profile", "results): \"\"\"Outputs all the rows in the table.\"\"\" # Replace", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "= 'do not' if results.get('containsSampledData'): sampled_text = 'do' row_text =", "OutputQueryInfo(self, results): \"\"\"Outputs the query used.\"\"\" self.writer.writerow(['These query parameters were", "for header in results.get('columnHeaders'): row.append(header.get('name')) self.writer.writerow(row) def OutputRows(self, results): \"\"\"Outputs", "and input_value[0] in SPECIAL_CHARS: return \"'\" + input_value return input_value", "\"\"\"Outputs the query used.\"\"\" self.writer.writerow(['These query parameters were used:']) query", "output the totals in the right columns. totals = results.get('totalsForAllResults')", "output to std.stdout.\"\"\" writer = UnicodeWriter(f, dialect='excel-tab') return ExportPrinter(writer) #", "characters that need to be escaped. 
SPECIAL_CHARS = ('+', '-',", "The name of the file to output to. Returns: The", "the writer object to output the data in the Data", "# distributed under the License is distributed on an \"AS", "convert a Data Export API reponse into TSV. This provides", "dialect=csv.excel, encoding='utf-8', **kwds): # Redirect output to a queue self.queue", "escaped. SPECIAL_CHARS = ('+', '-', '/', '*', '=') # TODO(nm):", "lists respectively and process them as needed. \"\"\" self.writer =", "self.writer.writerow([]) self.OutputQueryInfo(results) self.writer.writerow([]) self.OutputHeaders(results) self.OutputRows(results) self.writer.writerow([]) self.OutputRowCounts(results) self.OutputTotalsForAllResults(results) def OutputProfileName(self,", "# Unless required by applicable law or agreed to in", "write rows to CSV file \"f\", which is encoded in", "API reponse into TSV. This provides utitlites to both print", "rows: self.writerow(row) class ExportPrinter(object): \"\"\"Utility class to output a the", "the header. row = [''] * len(headers) # Use the", "\"\"\"Escapes the first character of a string if it is", "row in rows: self.writerow(row) class ExportPrinter(object): \"\"\"Utility class to output", "The response from the data export API. \"\"\" if not", "that need to be escaped. SPECIAL_CHARS = ('+', '-', '/',", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "that have an = with '= for row in results.get('rows'):", "API response into tabular data. \"\"\" __author__ = 'api.nickm@ (<NAME>)'", "= with '= for row in results.get('rows'): out_row = []", "added by position to a row of empty strings. Args:", "== 'METRIC': metric_index[header.get('name')] = index # Create a row of", "feed as tabular data.\"\"\" def __init__(self, writer): \"\"\"Initializes the class.", "results.get('rows'): self.writer.writerow('No Results found') else: self.OutputProfileName(results) self.writer.writerow([]) self.OutputContainsSampledData(results) self.writer.writerow([]) self.OutputQueryInfo(results)", "return ExportPrinter(writer) def GetTsvStringPrinter(f): \"\"\"Returns a ExportPrinter object to output", "= codecs.getincrementalencoder(encoding)() # pylint: disable=g-bad-name def writerow(self, row): \"\"\"Writes a", "in query.iteritems(): if type(value) == types.ListType: value = ','.join(value) else:", "tabular data.\"\"\" def __init__(self, writer): \"\"\"Initializes the class. Args: writer:", "that has the first character escaped if it is special.", "You may obtain a copy of the License at #", "reponse into TSV. This provides utitlites to both print TSV", "have been sampled.\"\"\" sampled_text = 'do not' if results.get('containsSampledData'): sampled_text", "pylint: disable=g-bad-name def writerows(self, rows): \"\"\"Writes rows for CSV output.", "empty strings the same length as the header. row =", "API. Args: results: The response from the data export API.", "file_name: string The name of the file to output to.", "= UnicodeWriter(f, dialect='excel-tab') return ExportPrinter(writer) # Wrapper to output to", "of lists respectively and process them as needed. \"\"\" self.writer", "def __init__(self, writer): \"\"\"Initializes the class. Args: writer: Typically an", "headers = results.get('columnHeaders') for index in range(0, len(headers)): header =", "queue self.queue.truncate(0) # pylint: disable=g-bad-name def writerows(self, rows): \"\"\"Writes rows", "a file. This logic handles all the utf-8 conversion. 
GetTsvFilePrinter:", "formatted rows of data retrieved from the Data Export API.", "Data Export API. Args: results: The response from the data", "sum of the values returned in the response. This will", "same columns as the headers are printed. The totals are", "the Apache License, Version 2.0 (the \"License\"); # you may", "to output to the screen. UnicodeWriter(): Utf-8 encodes output. ExportPrinter():", "to. Returns: The newly created ExportPrinter object. \"\"\" my_handle =", "def OutputRowCounts(self, results): \"\"\"Outputs how many rows were returned vs", "first character escaped if it is special. \"\"\" if input_value", "an instance of UnicodeWriter. The interface for this object provides", "def __init__(self, f, dialect=csv.excel, encoding='utf-8', **kwds): # Redirect output to", "to a queue self.queue = StringIO.StringIO() self.writer = csv.writer(self.queue, dialect=dialect,", "data. \"\"\" __author__ = 'api.nickm@ (<NAME>)' import codecs import csv", "first characters that have an = with '= for row", "characters that have an = with '= for row in", "the proper columns, a position index of the metric name", "the totals are added by position to a row of", "row]) # Fetch UTF-8 output from the queue ... data", "all the dimension and metric names in order.\"\"\" row =" ]
# -*- coding: utf-8 -*-
"""
@Project : RNN_Prediction
@Author  : <NAME>
@Filename: stockPrediction202005201318.py
@IDE     : PyCharm
@Time1   : 2020-05-20 13:18:46
@Time2   : 2020/5/20 13:18
@Month1  : May
@Month2  : May
"""
import tushare as ts
import tensorflow as tf
import pandas as pd
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt

# Daily history for CATL (ticker 300750), oldest row first.
stock_catl = ts.get_hist_data('300750')
stock_catl = stock_catl.sort_index(ascending=True)
# Normalize every column: center on its mean, scale by its range.
stock_catl = (stock_catl - stock_catl.mean()) / \
    (stock_catl.max() - stock_catl.min())
# A random split would leak future rows into training, so the
# chronological split below is used instead; the train_test_split
# variant is kept only for reference.
# train, val = train_test_split(stock_catl, test_size=0.5)
# train = train.sort_index(ascending=True)
# val = val.sort_index(ascending=True)
train = stock_catl.iloc[:-60, :]  # all but the last 60 trading days
val = stock_catl.iloc[-60:, :]    # last 60 days held out for validation
window_size = 30
column = 'high'
epochs = 300


def batch_dataset(dataset):
    # Collapse each window sub-dataset into a single (window_size, n) tensor.
    dataset_batched = dataset.batch(window_size, drop_remainder=True)
    return dataset_batched


def zip_ds(dataset):
    # Sliding windows of `window_size` rows, advanced one day at a time.
    ds_data = tf.constant(dataset.values, dtype=tf.float32)
    ds_data = tf.data.Dataset.from_tensor_slices(ds_data) \
        .window(window_size, shift=1).flat_map(batch_dataset)
    # The label for the window starting at row i is row i + window_size;
    # zip() silently drops the final window, which has no label.
    ds_label = tf.constant(dataset.values[window_size:], dtype=tf.float32)
    ds_label = tf.data.Dataset.from_tensor_slices(ds_label)
    ds_train = tf.data.Dataset.zip((ds_data, ds_label)).batch(128).repeat()
    return ds_train
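# Sanity-check the window/label pairing built by zip_ds: each window covers
# rows [i, i + window_size) and is zipped with row i + window_size as its
# label. Assuming get_hist_data's usual 13 feature columns and enough rows
# for a full batch, this should print (128, 30, 13) and (128, 13).
for data_batch, label_batch in zip_ds(train).take(1):
    print('windows:', data_batch.shape, 'labels:', label_batch.shape)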
ds_train = zip_ds(train)
ds_val = zip_ds(val)

model = tf.keras.Sequential(
    [
        tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'),
        tf.keras.layers.LSTM(128, activation='relu'),
        # One output per feature column returned by ts.get_hist_data().
        tf.keras.layers.Dense(13)
    ]
)
optimizer = tf.keras.optimizers.Adam(learning_rate=0.01)
model.compile(optimizer=optimizer, loss='mse')
history = model.fit(
    ds_train,
    epochs=epochs,
    steps_per_epoch=5,
    validation_data=ds_val,
    validation_steps=1
)
model.save('stockLSTM')

# Plot training and validation loss on a log scale.
plt.figure(figsize=(19, 9))
ax = plt.gca()
plt.plot(range(len(history.history['loss'])), history.history['loss'])
plt.plot(range(len(history.history['val_loss'])), history.history['val_loss'])
ax.set_yscale('log')
plt.show()

# Compare fitted and real values: slide the window over the full history
# and collect the one-step-ahead prediction for every day.
dff = pd.DataFrame()
for i in range(len(stock_catl) - window_size):
    fits = model.predict(
        tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :], axis=0)))
    dffits = pd.DataFrame(fits, columns=stock_catl.columns)
    dff = dff.append(dffits)
dff.index = stock_catl.index[window_size:]
plt.figure(figsize=(19, 9))
dff[column].plot()
stock_catl.iloc[window_size:, :][column].plot(style='-o')
plt.show()

# Predict the next 100 business days recursively: each predicted row is
# appended to dfp and becomes part of the next input window.
dfp = stock_catl.copy()
for i in range(100):
    pres = model.predict(
        tf.constant(tf.expand_dims(dfp.values[-1 * window_size:], axis=0)))
    dfpres = pd.DataFrame(pres, columns=stock_catl.columns)
    dfp = dfp.append(dfpres, ignore_index=True)
dfp[column].plot()
plt.show()
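# Note on the forecast above: because it is recursive, every prediction is
# fed back in as input, so errors compound and the curve deserves less
# trust the further out it goes. The trained weights can be reloaded later
# without retraining via the standard Keras call:
#     model = tf.keras.models.load_model('stockLSTM')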
dff = pd.DataFrame() for i in range(len(stock_catl)", "shift=1).flat_map(batch_dataset) ds_label = tf.constant(dataset.values[window_size:], dtype=tf.float32) ds_label = tf.data.Dataset.from_tensor_slices(ds_label) ds_train =", "tf.data.Dataset.from_tensor_slices(ds_label) ds_train = tf.data.Dataset.zip((ds_data, ds_label)).batch(128).repeat() return ds_train ds_train = zip_ds(train)", "optimizer = tf.keras.optimizers.Adam(learning_rate=0.01) model.compile(optimizer=optimizer, loss='mse') history = model.fit( ds_train, epochs=epoches,", "@Month2 : 五月 \"\"\" import tushare as ts import tensorflow", "/ \\ (stock_catl.max() - stock_catl.min()) # train, val = train_test_split(stock_catl,", "tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'), tf.keras.layers.LSTM(128, activation='relu'), tf.keras.layers.Dense(13) ] ) optimizer =", "plt.plot(range(len(history.history['val_loss'])), history.history['val_loss']) ax.set_yscale('log') plt.show() # Compare fitting and real values.", "validation_data=ds_val, validation_steps=1 ) model.save('stockLSTM') # Plot loss function plt.figure(figsize=(19, 9))", "as tf import pandas as pd from sklearn.model_selection import train_test_split", "predict future 100 business days. dfp = stock_catl.copy() for i", "model.predict(tf.constant(tf.expand_dims(dfp.values[-1 * window_size:], axis=0))) dfpres = pd.DataFrame(pres, columns=stock_catl.columns) dfp =", "# train = train.sort_index(ascending=True) # val = val.sort_index(ascending=True) train =", "def batch_dataset(dataset): dataset_batched = dataset.batch(window_size, drop_remainder=True) return dataset_batched def zip_ds(dataset):", "= train_test_split(stock_catl, test_size=0.5) # train = train.sort_index(ascending=True) # val =", "as ts import tensorflow as tf import pandas as pd", "= tf.data.Dataset.from_tensor_slices(ds_label) ds_train = tf.data.Dataset.zip((ds_data, ds_label)).batch(128).repeat() return ds_train ds_train =", "val = stock_catl.iloc[-60:, :] window_size = 30 column = 'high'", "Compare fitting and real values. 
dff = pd.DataFrame() for i", "= 30 column = 'high' epoches = 300 def batch_dataset(dataset):", "window_size): fits = model.predict(tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :], axis=0))) dffits =", "fits = model.predict(tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :], axis=0))) dffits = pd.DataFrame(fits,", "for i in range(100): pres = model.predict(tf.constant(tf.expand_dims(dfp.values[-1 * window_size:], axis=0)))", ": 五月 \"\"\" import tushare as ts import tensorflow as", "as pd from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt", "train_test_split import matplotlib.pyplot as plt stock_catl = ts.get_hist_data('300750') stock_catl =", "pd from sklearn.model_selection import train_test_split import matplotlib.pyplot as plt stock_catl", "window_size = 30 column = 'high' epoches = 300 def", "dataset_batched = dataset.batch(window_size, drop_remainder=True) return dataset_batched def zip_ds(dataset): ds_data =", "tf.keras.layers.LSTM(128, activation='relu'), tf.keras.layers.Dense(13) ] ) optimizer = tf.keras.optimizers.Adam(learning_rate=0.01) model.compile(optimizer=optimizer, loss='mse')", "@Filename: stockPrediction202005201318.py @IDE : PyCharm @Time1 : 2020-05-20 13:18:46 @Time2", "= dataset.batch(window_size, drop_remainder=True) return dataset_batched def zip_ds(dataset): ds_data = tf.constant(dataset.values,", "= zip_ds(val) model = tf.keras.Sequential( [ tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'), tf.keras.layers.LSTM(128,", "dff[column].plot() stock_catl.iloc[window_size:, :][column].plot(style='-o') plt.show() # To predict future 100 business", "tf.keras.optimizers.Adam(learning_rate=0.01) model.compile(optimizer=optimizer, loss='mse') history = model.fit( ds_train, epochs=epoches, steps_per_epoch=5, validation_data=ds_val,", "plt.show() # To predict future 100 business days. dfp =", "= zip_ds(train) ds_val = zip_ds(val) model = tf.keras.Sequential( [ tf.keras.layers.LSTM(128,", "window_size:], axis=0))) dfpres = pd.DataFrame(pres, columns=stock_catl.columns) dfp = dfp.append(dfpres, ignore_index=True)", "dffits = pd.DataFrame(fits, columns=stock_catl.columns) dff = dff.append(dffits) dff.index = stock_catl.index[window_size:]", "in range(len(stock_catl) - window_size): fits = model.predict(tf.constant(tf.expand_dims(stock_catl.values[i:i + window_size, :],", "model = tf.keras.Sequential( [ tf.keras.layers.LSTM(128, return_sequences=True, activation='relu'), tf.keras.layers.LSTM(128, activation='relu'), tf.keras.layers.Dense(13)", "= plt.gca() plt.plot(range(len(history.history['loss'])), history.history['loss']) plt.plot(range(len(history.history['val_loss'])), history.history['val_loss']) ax.set_yscale('log') plt.show() # Compare", "= (stock_catl - stock_catl.mean()) / \\ (stock_catl.max() - stock_catl.min()) #", "2020-05-20 13:18:46 @Time2 : 2020/5/20 13:18 @Month1 : 5月 @Month2", "To predict future 100 business days. dfp = stock_catl.copy() for", "stockPrediction202005201318.py @IDE : PyCharm @Time1 : 2020-05-20 13:18:46 @Time2 :", ": 5月 @Month2 : 五月 \"\"\" import tushare as ts", "steps_per_epoch=5, validation_data=ds_val, validation_steps=1 ) model.save('stockLSTM') # Plot loss function plt.figure(figsize=(19,", "# To predict future 100 business days. 
dfp = stock_catl.copy()", "= ts.get_hist_data('300750') stock_catl = stock_catl.sort_index(ascending=True) stock_catl = (stock_catl - stock_catl.mean())", "model.compile(optimizer=optimizer, loss='mse') history = model.fit( ds_train, epochs=epoches, steps_per_epoch=5, validation_data=ds_val, validation_steps=1", "- stock_catl.min()) # train, val = train_test_split(stock_catl, test_size=0.5) # train" ]
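
# --- Illustrative addendum (a sketch, not part of the original script) ---
# Reloading the SavedModel written by model.save() above for one-step inference.
# Only names defined earlier ('stockLSTM', stock_catl, window_size) are assumed.
reloaded = tf.keras.models.load_model('stockLSTM')
# The model maps a (1, window_size, 13) window to a (1, 13) next-day row;
# outputs are in the normalized scale used above, so invert with the saved
# column means / max-min spreads to recover prices.
next_row = reloaded.predict(
    tf.expand_dims(stock_catl.values[-window_size:].astype('float32'), axis=0))
print(pd.DataFrame(next_row, columns=stock_catl.columns))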
[ "userid) for a in g.database.fetchall(): data={} data['username']=a[0] data['userid']=a[1] data['profilepic'] =", "if request.method == 'POST': reportform = ReportForm(request.form, prefix='form5') print reportform.report.data", "5 \"\"\" % userid) for a in g.database.fetchall(): data={} data['rid']=a[0]", "charset='utf8') g.database = g.conn.cursor() @app.teardown_request def teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST'])", "\"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" and", "and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE", "artist import artist import pymysql import hashlib from flask import", "== f['friendid']: return True else: return False def getAllComments(userid): g.database.execute(\"SELECT", "User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends", "% ( value )) for a in g.database.fetchall(): data={} data['title']=a[0]", "pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe', charset='utf8') g.database = g.conn.cursor() @app.teardown_request", "Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name LIKE \"%s\" \"\"\" %", "# Disable Flask's debugger if external debugger is requested use_debugger", "MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES (\"%s\",\"%s\",\"%s\",\"%s\")\"\"\" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if", "the songs app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist); #for the admin", "#for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS =", "tos(): return render_template('tos.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/changepwd') def", "Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"Name\"]=g.database.fetchone()", "Playlist_name,Playlist_id,User_id from playlists where Playlist_id=\"%s\" \"\"\" % playlistid) for p", "FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5\"", "flash(\"Username or Email has been taken\") else: flash_errors(contactform) return render_template('homepage/index.html',", ") session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "redirect(url_for('userProfile',userid=uid)) else: flash(\"Please enter valid data !\") else: flash(\"Username or", "return False g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\" %", "def index(): session[\"login\"] = False session[\"signup\"] = False session[\"logged_in\"] =", "requests SET Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (0,userfrom,userto))", "WHERE song_id=%s\", (songid)) albumname = g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic FROM albums", "or Email has been taken\") else: flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'),", "data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User def getAllRecommend(userid): recommend =[] g.database.execute(\"\"\"", "secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + filename session['profilepic'] =", 
"FROM playlists WHERE Playlist_id=%s and User_id=%s \"\"\" % (playlistid, userid))", "return redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom):", "User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] != '0' and", "admin import admin from artist import artist import pymysql import", "for song in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Song_title,Song_id,Song_Album from", "the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS = set(['png',", "+ filename session['profilepic'] = filepath g.database.execute(\"\"\"UPDATE MuShMe.entries SET Profile_pic=\"%s\" WHERE", "getAllRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\"", "= [] search_playlist =[] search_artist = [] check_song = g.database.execute(\"\"\"SELECT", "{} data['pname']=a[0] data['pid']=a[2] g.database.execute(\"\"\" SELECT Username, Name from MuShMe.entries WHERE", "ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if validate(contactform.email.data,contactform.username.data): check_signup = g.database.execute(\"\"\"INSERT into", "form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User),", "redirect(url_for('userProfile', userid=userid)) app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True) app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' +", "file and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath =", "-*- from src import app import os import shutil from", "ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute(\"\"\"INSERT INTO MuShMe.complaints (Complain_type,", "Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" LIMIT 5 \"\"\" % userid)", "\"\"\" % userid) for user in g.database.fetchall(): data = {}", "username) if email or name: return False else: return True", "filename session['profilepic'] = filepath g.database.execute(\"\"\"UPDATE MuShMe.entries SET Profile_pic=\"%s\" WHERE User_id=\"%s\"", "d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRecommend(userid): recommend =[] g.database.execute(\"\"\"", "@app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return", "\"\"\" % (userid)) g.conn.commit() for uid in userid: session['userid'] =", "= g.conn.cursor() @app.teardown_request def teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST']) def login():", "LIMIT 5 \"\"\" % userid) for a in g.database.fetchall(): data={}", "changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout(): if 'email' not in", "filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + 
filename", "entries where User_id='%s' \"\"\" % a[1]) for i in g.database.fetchall():", "retval = [] for commentid in commentids: g.database.execute(\"SELECT Comment, User_id", "Comment_id from MuShMe.comments WHERE Comment=\"%s\" \"\"\" % (commentform.comment.data)) data =", "getAllComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id", "g.database.execute(\"\"\"SELECT Profile_Pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['profilepic']", "# -*- coding: utf-8 -*- from src import app import", "from friends WHERE User_id2=\"%s\" \"\"\" % userid) for user in", "False def getAllComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER", "(userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\"", "(uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def acceptrequest(userto,userfrom): if", "search_artist = [] check_song = g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE", "MuShMe.entries WHERE Username LIKE \"%s\" or Name LIKE \"%s\" \"\"\"", "loginform.validate_on_submit(): check_login = g.database.execute(\"\"\"SELECT User_id from MuShMe.entries WHERE Email_id=\"%s\" AND", "for user in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username, User_id", "comment(rcvrid, senderid): if request.method == 'POST': commentform = CommentForm(request.form, prefix='form4')", "flash(\"Please enter valid data !\") else: flash(\"Username or Email has", "sendrequest(uidto,uidfrom): if request.method == 'POST': if requestvalidate(uidfrom,uidto): query=(\"\"\" INSERT INTO", "mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s", "g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return", "g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method ==", "return False else: return True @app.route('/search',methods=['POST','GET']) def search(): if request.method", "check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher", "in friendName: a=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\" and User_id1=\"%s\"", "recommend where User_id_to=\"%s\" \"\"\" % userid) for a in g.database.fetchall():", "(\"%s\",\"%s\")\"\"\" % (data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id FROM MuShMe.user_comments", "if request.method == 'POST': query=(\"\"\" UPDATE requests SET Status=\"%s\" WHERE", "session['userid'] = uid g.database.execute(\"\"\"SELECT Username from MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % (uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=uidto))", "User_id=\"%s\" \"\"\" % p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1]", "Comment=\"%s\" \"\"\" % (commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment", "(\"\"\" 
INSERT INTO friends Values (\"%s\",\"%s\") \"\"\" % (userfrom,userto)) g.database.execute(query)", "Comment_id=\"%s\" \"\"\" % data) #print g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET'])", "g.database.execute(\"\"\" SELECT Song_id from recommend_songs where Recommend_id=\"%s\" \"\"\" % a[0])", "False contactform = ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if validate(contactform.email.data,contactform.username.data): check_signup", "\"\"\" % (0,userfrom,userto)) g.database.execute(query) g.conn.commit() query = (\"\"\" INSERT INTO", "= False session[\"logged_in\"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For", "(newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else: flash(\"Please enter valid data !\")", "from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % user[0]) for a in", "@app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method == 'POST': commentform =", "#print data enter_comment = g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES", "render_template('about.html') @app.route('/changepwd') def changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout(): if", "Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s Time: %(asctime)s Message: %(message)s", "g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name LIKE \"%s\" \"\"\"", "% ( value, value )) for a in g.database.fetchall(): data", "@app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method == 'POST': reportform = ReportForm(request.form,", "'POST': loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute(\"\"\"SELECT", "% uid) session['profilepic'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE", "Password\") return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index'))) def flash_errors(form):", "'POST': uid = userid print request.form if request.form['editname'] != '':", "getRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\"", "= secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + filename session['profilepic']", "else: flash(\"Incorrect Email-Id or Password\") return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else:", "( value, value )) for a in g.database.fetchall(): data =", "= False contactform = ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if validate(contactform.email.data,contactform.username.data):", "for song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] =", "where Username=\"%s\" \"\"\" % username) if email or name: return", "== f['friendid']: return True else: return False g.database.execute(\"\"\"SELECT User_id1 from", "in session: return render_template('error.html') session['logged_in']=False return render_template('login.html') if not app.debug:", "aptana to receive 
errors, set use_debugger=False app = create_app(config=\"\"\"config.yaml\"\"\") if", "request.method == 'POST': reportform = ReportForm(request.form, prefix='form5') print reportform.report.data check_report", "where User_id=\"%s\" \"\"\" % userid) for a in g.database.fetchall(): data={}", "User_id=\"%s\" \"\"\" % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries", "in userid: session['userid'] = uid g.database.execute(\"\"\"SELECT Username from MuShMe.entries WHERE", "data['commentid'] = commentid[0] g.database.execute(\"SELECT Username FROM entries WHERE User_id=%s\", (data['userid']))", "entry=searchform.entry.data,form6=searchForm(prefix='form6'), search_song=search_song, search_artist=search_artist,friends=search_friend, search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST'])", "import ContactForm, LoginForm, editForm, ReportForm, CommentForm, searchForm, AddPlaylist from flask.ext.mail", "(data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getFriend(userid): friendName", "= g.database.execute(\"\"\" SELECT * from MuShMe.entries where Email_id=\"%s\" \"\"\" %", "5\" % (userid)) commentids = g.database.fetchall() retval = [] for", "User_id=%s ORDER BY Comment_id DESC\" % (userid)) commentids = g.database.fetchall()", "= ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute(\"\"\"INSERT INTO MuShMe.complaints", "def about(): return render_template('about.html') @app.route('/changepwd') def changepwd(): return render_template('changepwd.html') @app.route('/logout')", "= song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] =", "(contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute(\"\"\"SELECT User_id from", "use_debugger = True try: # Disable Flask's debugger if external", "% (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest())) user_id = g.database.fetchone() for uid in user_id:", "g.database.execute(\"\"\"SELECT Artist_name, Artist_id from MuShMe.artists WHERE Artist_name LIKE \"%s\" \"\"\"", "= g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name LIKE \"%s\"", "g.database.fetchall(): d={} d['userid'] = i[0] d['username'] = i[1] d['name'] =", "Email has been taken\") else: flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform)", "mail_handler.setLevel(logging.ERROR) app.logger.addHandler(mail_handler) from logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING)", "WHERE User_id=\"%s\" \"\"\" % uid ) session[\"Name\"]=g.database.fetchone() g.database.execute(\"\"\"SELECT DOB from", "file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER + filename session['profilepic'] = filepath", "from MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\" % userid) for song", "% uid) session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True g.database.execute(\"\"\"SELECT Name", "\"\"\" % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data)", "'POST': reportform = 
ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute(\"\"\"INSERT", "flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform) def validate(email,username): email = g.database.execute(\"\"\"", "% uid ) session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE User_id=\"%s\"", "% uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_pic FROM MuShMe.entries WHERE", "session['profilepic'] = filepath g.database.execute(\"\"\"UPDATE MuShMe.entries SET Profile_pic=\"%s\" WHERE User_id=\"%s\" \"\"\"", "# To allow aptana to receive errors, set use_debugger=False app", "if request.method == 'POST': searchform = searchForm(prefix='form6') #print 'f' value", "search(): if request.method == 'POST': searchform = searchForm(prefix='form6') #print 'f'", "else: flash(\"Incorrect Email-Id or Password\") else: flash(\"Incorrect Email-Id or Password\")", "g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE User_id=\"%s\" \"\"\" % userid) for", "render_template('tos.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/changepwd') def changepwd(): return", "in ALLOWED_EXTENSIONS @app.route('/user/<userid>/file', methods=['GET', 'POST']) def upload_file(userid): if request.method ==", "FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['privilege'] = g.database.fetchone()[0]", "SELECT * from MuShMe.entries where Email_id=\"%s\" \"\"\" % email) name", "shutil from flask import Flask, render_template, session, request, flash, url_for,", "length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST': addplaylistform", "= g.database.fetchone()[0] songName.append(data) return songName def getUserData(userid): User = []", "\"\"\" % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile',", "LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute(\"\"\"SELECT User_id from MuShMe.entries", "@app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if request.method == 'POST': if requestvalidate(uidfrom,uidto): query=(\"\"\"", "WHERE User_id=%s LIMIT 5\"\"\" % userid) for song in g.database.fetchall():", "session[\"Name\"]=g.database.fetchone() g.database.execute(\"\"\"SELECT DOB from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid", "data['profilepic']=a[2] data['userid']=a[3] search_friend.append(data) check_playlist = g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists", "(\"%s\",\"%s\",\"%s\",\"%s\")\"\"\" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute(\"\"\"SELECT", "recommend.append(data) return recommend def getRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to", "render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return render_template('tos.html') @app.route('/about') def about():", 
"User_id=%s\", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval def getFriend(userid):", "g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE \"%s\" \"\"\" %", "from recommend_playlists where Recommend_id=\"%s\" \"\"\" % a[0]) if check_playlist: playlistid", ") session[\"dob\"]=str(g.database.fetchone()) newPlaylist = session['UserName'] + ' default collection' g.database.execute(\"\"\"INSERT", "{} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute(\"\"\"SELECT Username, Name, Profile_pic,", "SELECT Username from entries where User_id='%s' \"\"\" % a[1]) data['userfromname']", "song in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Song_title,Song_id,Song_Album from MuShMe.songs", "MuShMe.songs WHERE Song_title LIKE \"%s\" \"\"\" % ( value ))", "SET Profile_pic=\"%s\" WHERE User_id=\"%s\" \"\"\" % (filepath, userid)) g.conn.commit() return", "SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id=\"%s\" \"\"\" % (userid)) g.conn.commit() for uid", "\"\"\" % ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit() g.database.execute(\"\"\"SELECT Comment_id", "if request.method == 'POST': file = request.files['file'] if file and", "userid == f['friendid']: return True else: return False def getAllComments(userid):", "User_id2 from friends WHERE User_id1=\"%s\" \"\"\" % (userid)) for user", "f['friendid']: return True else: return False g.database.execute(\"\"\"SELECT User_id1 from friends", "True else: return False def getAllComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments", "== 'POST': uid = userid print request.form if request.form['editname'] !=", "\"%s\" or Name LIKE \"%s\" \"\"\" % ( value, value", "app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method == 'POST':", "Song_title,Song_Album,Genre,Publisher from songs where Song_id=\"%s\" \"\"\" % songid) for song", "= True session[\"login\"] = False contactform = ContactForm(request.form, prefix='form2') if", "% (userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False", "Module: %(module)s Function: %(funcName)s Time: %(asctime)s Message: %(message)s ''')) if", "g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['privilege']", "for a in g.database.fetchall(): data = {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data)", "def editName(userid): if request.method == 'POST': uid = userid print", "g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (addplaylistform.add.data,userid)) g.conn.commit()", "WHERE Artist_name LIKE \"%s\" \"\"\" % ( value )) for", "True session[\"signup\"] = False if request.method == 'POST': loginform =", "return render_template('error.html'), 404 else: if request.method == 'GET': User=getUserData(userid) return", "% ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit() g.database.execute(\"\"\"SELECT Comment_id from", "return '.' 
in filename and \\ filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS", "% (playlistid, userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your profile are", "data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] = [] g.database.execute(\"\"\" SELECT User_id,Username,Name from entries", "= g.database.execute(\"\"\" SELECT Song_id from recommend_songs where Recommend_id=\"%s\" \"\"\" %", "session: return render_template('error.html') session['logged_in']=False return render_template('login.html') if not app.debug: import", "= g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % a[1]) for k", "g.database.fetchall() retval = [] for commentid in commentids: g.database.execute(\"SELECT Comment,", "reportform = ReportForm(request.form, prefix='form5') print reportform.report.data check_report = g.database.execute(\"\"\"INSERT INTO", "= g.database.execute(\"\"\"INSERT INTO MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES (\"%s\",\"%s\",\"%s\",\"%s\") \"\"\"", "= (\"\"\" INSERT INTO friends Values (\"%s\",\"%s\") \"\"\" % (userfrom,userto))", "check_song = g.database.execute(\"\"\" SELECT Song_id from recommend_songs where Recommend_id=\"%s\" \"\"\"", "__name__ == \"\"\"__main__\"\"\": # To allow aptana to receive errors,", "% (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\" and User_id1=\"%s\"", "'.' in filename and \\ filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS @app.route('/user/<userid>/file',", "User_id,Username,Name from entries where User_id='%s' \"\"\" % a[1]) for i", "#print g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method", "app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin); #for the artist pages", "redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect Email-Id or Password\") else: flash(\"Incorrect Email-Id", "from api import API from songs import SONG from playlist", "songs import SONG from playlist import playlist from admin import", "type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s Time: %(asctime)s", "User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\"", "return recommend def getRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from", "d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(\"\"\" SELECT Username, Name,User_id from MuShMe.entries", "uid #print userid return redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect Email-Id or", "flash, url_for, redirect from Forms import ContactForm, LoginForm, editForm, ReportForm,", "where Email_id=\"%s\" \"\"\" % email) name = g.database.execute(\"\"\" SELECT *", "hashlib from flask import g mail = Mail() mail.init_app(app) #For", "data={} data['username']=a[0] data['userid']=a[1] data['profilepic'] = a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5] data['dob']=str(a[6])", "}) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method == 'POST': commentform", "( value )) for a in g.database.fetchall(): data={} data['title']=a[0] 
data['album']=a[1]", "WHERE Comment=\"%s\" \"\"\" % (commentform.comment.data)) data = g.database.fetchone()[0] #print data", "(filepath, userid)) g.conn.commit() return redirect(url_for('userProfile', userid=userid)) app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True) app.wsgi_app =", "prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute(\"\"\"SELECT User_id from MuShMe.entries WHERE", "To allow aptana to receive errors, set use_debugger=False app =", "for a in g.database.fetchall(): data = {} data['username']=a[0] data['name']=a[1] data['profilepic']=a[2]", "playlist = request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute(\"\"\"DELETE FROM playlists", "passwd='<PASSWORD>', db='MuShMe', charset='utf8') g.database = g.conn.cursor() @app.teardown_request def teardown_request(exception): g.conn.close()", "value )) for a in g.database.fetchall(): data = {} data['pname']=a[0]", "app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location:", "Email_id=\"%s\" \"\"\" % email) name = g.database.execute(\"\"\" SELECT * from", "in errors: flash(u\"Error in the %s field - %s\" %", "search_artist.append(data) check_friend = g.database.execute(\"\"\"SELECT Username, Name, Profile_pic, User_id from MuShMe.entries", "WHERE User_id=\"%s\" \"\"\" % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic", "#print senderid #print rcvrid if commentform.comment.data: query = (\"\"\"INSERT INTO", "User_id from MuShMe.entries WHERE Username LIKE \"%s\" or Name LIKE", "= songid data['song'].append(d) check_playlist = g.database.execute(\"\"\" SELECT Playlist_id from recommend_playlists", "request.method == 'POST': searchform = searchForm(prefix='form6') #print 'f' value =", "= {} g.database.execute(\"\"\"SELECT Username, User_id from MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "data request.append(data) return request def getSongArt(songid): g.database.execute(\"SELECT Song_Album FROM songs", "'POST': commentform = CommentForm(request.form, prefix='form4') #print senderid #print rcvrid if", "app.debug: import logging from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1',", "data['email']=a[4] data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User def getAllRecommend(userid): recommend =[]", "VALUES (\"%s\",\"%s\",\"%s\",\"%s\") \"\"\" % (reportform.report.data, reportform.other.data, commentid, session['userid'] )) if", "import SONG from playlist import playlist from admin import admin", "% userid) for user in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT", "MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest())) user_id", "[] check_song = g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE", "User_id) VALUES (\"%s\",\"%s\")\"\"\" % (addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\", methods=[\"POST\"])", "friendName.append(data) g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\" % userid)", "for f in friendName: a=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\"", "('U',commentform.comment.data, senderid)) print query 
g.database.execute(query) g.conn.commit() g.database.execute(\"\"\"SELECT Comment_id from MuShMe.comments", "g.database.execute(\"\"\"SELECT Username, Name, Profile_pic, User_id from MuShMe.entries WHERE Username LIKE", "belong to us. @app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle", "userid return redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect Email-Id or Password\") else:", "from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT", "'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER @app.route('/') def index():", "uid ) session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "+ app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if request.method ==", "def acceptrequest(userto,userfrom): if request.method == 'POST': query=(\"\"\" UPDATE requests SET", "return recommend def getRequest(userid): request =[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from", "\"\"\" % a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data)", "commentid[0] g.database.execute(\"SELECT Username FROM entries WHERE User_id=%s\", (data['userid'])) data['username'] =", "session[\"signup\"] = False session[\"logged_in\"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2'))", "= 'src/static/' + UPLOAD_FOLDER @app.route('/') def index(): session[\"login\"] = False", "in the %s field - %s\" % ( getattr(form, field).label.text,", "g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic FROM albums WHERE Album_id=%s\", (albumname)) return g.database.fetchone()[0]", "g.conn.commit() g.database.execute(\"\"\"SELECT Comment_id from MuShMe.comments WHERE Comment=\"%s\" \"\"\" % (commentform.comment.data))", "def signup(): session[\"signup\"] = True session[\"login\"] = False contactform =", "% user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for", "MuShMe.songs WHERE Song_id=\"%s\" \"\"\" % song) for a in g.database.fetchall():", "= g.database.execute(\"\"\"SELECT Artist_name, Artist_id from MuShMe.artists WHERE Artist_name LIKE \"%s\"", "search_song.append(data) check_artist = g.database.execute(\"\"\"SELECT Artist_name, Artist_id from MuShMe.artists WHERE Artist_name", "g.conn.commit() g.database.execute(\"\"\"SELECT User_id from MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\"", "search_song=search_song, search_artist=search_artist,friends=search_friend, search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def", "SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id=\"%s\" \"\"\" % userid) for", "(Complain_type, Complain_description, Comment_id,reported_by) VALUES (\"%s\",\"%s\",\"%s\",\"%s\") \"\"\" % (reportform.report.data, reportform.other.data, commentid,", "g mail = Mail() mail.init_app(app) #For the collector script. 
app.register_blueprint(API);", "user_id = g.database.fetchone() for uid in user_id: session['userid'] = uid", "import playlist from admin import admin from artist import artist", "\"\"\" % user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2]", "' default collection' g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\"", "data['art']=getSongArt(a[4]) search_song.append(data) check_artist = g.database.execute(\"\"\"SELECT Artist_name, Artist_id from MuShMe.artists WHERE", "return render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return render_template('tos.html') @app.route('/about') def", "- %s\" % ( getattr(form, field).label.text, error )) @app.route('/signup', methods=['POST'])", "'POST': if requestvalidate(uidfrom,uidto): query=(\"\"\" INSERT INTO requests (Request_from,Request_to,Status) VALUES (\"%s\",\"%s\",\"%s\")", "UPDATE requests SET Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" %", "from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) newPlaylist", "5\"\"\" % userid) for song in g.database.fetchall(): data = {}", "songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher from", "the collector script. app.register_blueprint(API); #For the songs app.register_blueprint(SONG); #For the", "song[2] d['publisher'] = song[3] d['songid'] = songid data['song'].append(d) check_playlist =", "a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return", "uid = userid print request.form if request.form['editname'] != '': g.database.execute(\"\"\"UPDATE", "SET Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (-1,userfrom,userto)) g.database.execute(query)", "g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data) return songName def getUserData(userid): User", "debugger if external debugger is requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA')) except:", "g.database.execute(\"\"\"UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id=\"%s\" \"\"\" % (userid)) g.conn.commit()", "Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function:", "User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) if a or b:", "data enter_comment = g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES (\"%s\",\"%s\")\"\"\"", "d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRecommend(userid): recommend =[]", "admin pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER =", "if check_song: songid = g.database.fetchone()[0] data['song'] = [] g.database.execute(\"\"\" SELECT", "def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle 404 not found errors", "return songName def getUserData(userid): User = [] g.database.execute(\"\"\" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB", "def teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST']) def login(): session[\"login\"] = True", "and Request_to=\"%s\" \"\"\" % (0,userfrom,userto)) 
g.database.execute(query) g.conn.commit() query = (\"\"\"", "getattr(form, field).label.text, error )) @app.route('/signup', methods=['POST']) def signup(): session[\"signup\"] =", "=[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" \"\"\" %", "render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database connections. @app.before_request def before_request(): g.conn", "% a[1]) data['userfromname'] = g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(\"\"\"", "return render_template('login.html') if not app.debug: import logging from logging.handlers import", "( value )) for a in g.database.fetchall(): data = {}", "where Playlist_id=\"%s\" \"\"\" % playlistid) for p in g.database.fetchall(): d=", "MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['profilepic'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT", "=[] g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" \"\"\" % (userid))", "else: flash(\"Username or Email has been taken\") else: flash_errors(contactform) return", "MuShMe.entries SET Name=%s WHERE User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit() if", "for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2]", "[] g.database.execute(\"\"\" SELECT User_id,Username,Name from entries where User_id='%s' \"\"\" %", "def upload_file(userid): if request.method == 'POST': file = request.files['file'] if", "% (reportform.report.data, reportform.other.data, commentid, session['userid'] )) if check_report == True:", "search_playlist.append(data) length = len(search_playlist) + len(search_song) + len(search_friend) + len(search_artist)", "SONG from playlist import playlist from admin import admin from", "g.database.execute(query) g.conn.commit() query = (\"\"\" INSERT INTO friends Values (\"%s\",\"%s\")", "(userid)) for user in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username,", "Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\" % userid) for", "False else: return True @app.route('/search',methods=['POST','GET']) def search(): if request.method ==", "session['logged_in'] = True session['logged_in']=True #print uid #print userid return redirect(url_for('userProfile',", "value = searchform.entry.data + '%' search_fname = [] search_song= []", "from MuShMe.entries where Username=\"%s\" \"\"\" % username) if email or", "WHERE User_id=%s ORDER BY Comment_id DESC\" % (userid)) commentids =", "pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg',", "song in g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1]", "data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(\"\"\" SELECT Username from entries where", "data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data) check_artist = g.database.execute(\"\"\"SELECT Artist_name,", "ORDER BY Comment_id DESC LIMIT 5\" % (userid)) commentids =", "@app.route('/signup', methods=['POST']) def signup(): session[\"signup\"] = True session[\"login\"] = False", "g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(\"\"\"", "SELECT Request_id,Request_from,Request_to,Status from requests where 
Request_to=\"%s\" \"\"\" % userid) for", "app.logger.addHandler(mail_handler) from logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler)", "playlist from admin import admin from artist import artist import", "g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id=\"%s\" \"\"\" % songid)", "def comment(rcvrid, senderid): if request.method == 'POST': commentform = CommentForm(request.form,", "= len(search_playlist) + len(search_song) + len(search_friend) + len(search_artist) + len(search_fname)", "d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist = g.database.execute(\"\"\" SELECT Playlist_id from", "app = create_app(config=\"\"\"config.yaml\"\"\") if app.debug: use_debugger = True try: #", "user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print", "in commentids: g.database.execute(\"SELECT Comment, User_id FROM comments WHERE Comment_id=%s\", (commentid[0]))", "(userid)) commentids = g.database.fetchall() retval = [] for commentid in", "def getRequest(userid): request =[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from requests where", "Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE \"%s\" \"\"\" % (", "g.database.execute(\"\"\" SELECT Username from entries where User_id='%s' \"\"\" % a[1])", "g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s \"\"\" % (playlistid,", "friendName: a=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" and User_id2=\"%s\" \"\"\"", "+ ' default collection' g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES", "WHERE User_id1=\"%s\" \"\"\" % userid) for user in g.database.fetchall(): data", "ReportForm, CommentForm, searchForm, AddPlaylist from flask.ext.mail import Message, Mail from", "g.database.fetchone()[0] check_song = g.database.execute(\"\"\" SELECT Song_id from recommend_songs where Recommend_id=\"%s\"", "from MuShMe.songs WHERE Song_id=\"%s\" \"\"\" % song) for a in", "Username, Name, Profile_pic, User_id from MuShMe.entries WHERE Username LIKE \"%s\"", "value )) for a in g.database.fetchall(): data = {} data['username']=a[0]", "\"%s\" \"\"\" % ( value )) for a in g.database.fetchall():", "data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName def getPlaylist(userid): playlist", "g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid)", "data = {} g.database.execute(\"\"\"SELECT Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id=\"%s\" \"\"\"", "getSong(userid): songName = [] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE User_id=%s", "def getSongArt(songid): g.database.execute(\"SELECT Song_Album FROM songs WHERE song_id=%s\", (songid)) albumname", "= g.database.execute(\"\"\" SELECT Status from requests where Request_to=\"%s\" and Request_from=\"%s\"", "== 'GET': User=getUserData(userid) return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'),", "= {} data['username']=a[0] data['name']=a[1] data['profilepic']=a[2] data['userid']=a[3] 
search_friend.append(data) check_playlist = g.database.execute(\"\"\"SELECT", "= [] g.database.execute(\"\"\" SELECT User_id,Username,Name from entries where User_id='%s' \"\"\"", "\"\"\" % username) if email or name: return False else:", "\"\"\" % (userid,f['friendid'])) if a or b: return True elif", "g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName def", "print data request.append(data) return request def getSongArt(songid): g.database.execute(\"SELECT Song_Album FROM", "redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return '.' in", "= create_app(config=\"\"\"config.yaml\"\"\") if app.debug: use_debugger = True try: # Disable", "User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) if a or b:", "to receive errors, set use_debugger=False app = create_app(config=\"\"\"config.yaml\"\"\") if app.debug:", "return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(\"\"\" SELECT Status", "from friends WHERE User_id1=\"%s\" \"\"\" % (userid)) for user in", "(\"%s\",\"%s\",\"%s\") \"\"\" % ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit() g.database.execute(\"\"\"SELECT", "= True g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute(\"\"\"SELECT", "for playlistid in playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s and", "data = {} g.database.execute(\"\"\"SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE", "Mail() mail.init_app(app) #For the collector script. 
app.register_blueprint(API); #For the songs", "playlist def getSong(userid): songName = [] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song", "g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRecommend(userid):", "check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else: return True", "= g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "i[2] data['reqfromuser'].append(d) print data request.append(data) return request def getSongArt(songid): g.database.execute(\"SELECT", "friends WHERE User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2", "data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute(\"\"\"SELECT Username, Name, Profile_pic, User_id from", "(userid,f['friendid'])) if a or b: return True elif userid ==", "Disable Flask's debugger if external debugger is requested use_debugger =", "field).label.text, error )) @app.route('/signup', methods=['POST']) def signup(): session[\"signup\"] = True", "(0,userfrom,userto)) g.database.execute(query) g.conn.commit() query = (\"\"\" INSERT INTO friends Values", "== 'POST': loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login =", "g.database.execute(\"\"\"SELECT Username from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid )", "return True elif userid == f['friendid']: return True else: return", "where Recommend_id=\"%s\" \"\"\" % a[0]) if check_song: songid = g.database.fetchone()[0]", "from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"Name\"]=g.database.fetchone() g.database.execute(\"\"\"SELECT", "the admin pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER", "% (data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id FROM MuShMe.user_comments WHERE", "if check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else: return", "g.database.fetchall(): data = {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute(\"\"\"SELECT", "= g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(\"\"\" SELECT Song_id from", "k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length = len(search_playlist) +", "404 not found errors @app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'), 404", "check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile',", "song[3] d['songid'] = songid d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist =", "% a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length", "loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit(): check_login = g.database.execute(\"\"\"SELECT User_id", "% uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print uid", "Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC\" %", "return retval def getFriend(userid): friendName =[] g.database.execute(\"\"\"SELECT User_id2 from friends", "User_id from 
MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" % (loginform.email.data,", "if request.method == 'GET': User=getUserData(userid) return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'),", "from MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest()))", "SET Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (0,userfrom,userto)) g.database.execute(query)", "userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your profile are belong to", "render_template('login.html') if not app.debug: import logging from logging.handlers import SMTPHandler", "% p[2]) for k in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d)", "User = [] g.database.execute(\"\"\" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id=\"%s\"", "{'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid): if", "def page_not_found_error(error): return render_template('error.html'), 404 @app.route('/termsofservices') def tos(): return render_template('tos.html')", "song_id=%s\", (songid)) albumname = g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic FROM albums WHERE", "% (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute(\"\"\"SELECT User_id", "Artist_name, Artist_id from MuShMe.artists WHERE Artist_name LIKE \"%s\" \"\"\" %", "request.method == 'POST': commentform = CommentForm(request.form, prefix='form4') #print senderid #print", "g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(\"\"\" SELECT Song_id from recommend_songs", "g.database.execute(\"\"\" SELECT * from MuShMe.entries where Email_id=\"%s\" \"\"\" % email)", "WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (-1,userfrom,userto)) g.database.execute(query) g.conn.commit() return", "recommend_songs where Recommend_id=\"%s\" \"\"\" % a[0]) if check_song: songid =", "from flask import g mail = Mail() mail.init_app(app) #For the", "session[\"logged_in\"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database connections.", "'': g.database.execute(\"\"\"UPDATE MuShMe.entries SET Name=%s WHERE User_id=%s \"\"\", ([request.form['editname']], userid))", "def getPlaylist(userid): playlist = [] g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE", "#print rcvrid if commentform.comment.data: query = (\"\"\"INSERT INTO MuShMe.comments (comment_type,", "'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER @app.route('/') def", "True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if session['logged_in'] == False: return render_template('error.html'),", "enter_comment = g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES (\"%s\",\"%s\")\"\"\" %", "check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(\"\"\" SELECT Playlist_name,Playlist_id,User_id", "a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = [] 
g.database.execute(\"\"\"", "where User_id='%s' \"\"\" % a[1]) for i in g.database.fetchall(): d={}", "retval def getFriend(userid): friendName =[] g.database.execute(\"\"\"SELECT User_id2 from friends WHERE", "python # -*- coding: utf-8 -*- from src import app", "d['publisher'] = song[3] d['songid'] = songid data['song'].append(d) check_playlist = g.database.execute(\"\"\"", "\"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) newPlaylist = session['UserName'] + '", "Profile_Pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['profilepic'] =", "comments WHERE Comment_id=%s\", (commentid[0])) commentdata = g.database.fetchone() data = {}", "in g.database.fetchall(): data = {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend =", "#for the admin pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist);", "requests where Request_to=\"%s\" \"\"\" % userid) for a in g.database.fetchall():", "@app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if request.method == 'POST': uid = userid", "request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s", "import admin from artist import artist import pymysql import hashlib", "and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE", "g.database.fetchall(): d = {} d['title']=song[0] d['album'] = song[1] d['genre'] =", "a[1]) data['userfromname'] = g.database.fetchone()[0] check_song = g.database.execute(\"\"\" SELECT Song_id from", "def getSong(userid): songName = [] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE", "INTO MuShMe.comments (comment_type, Comment, User_id) VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % ('U',commentform.comment.data,", "logging from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1', '<EMAIL>', app.config['DEFAULT_MAIL_SENDER'],", "pages app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\"", "redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method == 'POST': query=(\"\"\"", "[] g.database.execute(\"\"\" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id=\"%s\" \"\"\" %", "artist import pymysql import hashlib from flask import g mail", "([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] != '0' and request.form['birthday_month'] !=", "g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName: a=g.database.execute(\"\"\"SELECT User_id2", "from playlists where Playlist_id=\"%s\" \"\"\" % playlistid) for p in", "data['name']=a[1] data['profilepic']=a[2] data['userid']=a[3] search_friend.append(data) check_playlist = g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from", "where User_id='%s' \"\"\" % a[1]) data['userfromname'] = g.database.fetchone()[0] check_song =", "the playlist app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin); #for the", "Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id=\"%s\" \"\"\" % userid) for a", "'0' and request.form['birthday_day'] != '0': g.database.execute(\"\"\"UPDATE MuShMe.entries SET DOB=\"%s-%s-%s\" WHERE", "CommentForm, searchForm, 
@app.route('/user/<userid>', methods=['GET'])
def userProfile(userid):
    if not session['logged_in']:
        return render_template('error.html'), 404
    if request.method == 'GET':
        User = getUserData(userid)
        return render_template('userprofile/index.html', userid=userid,
                               form4=CommentForm(prefix='form4'),
                               form3=editForm(prefix='form3'),
                               form6=searchForm(prefix='form6'),
                               form5=ReportForm(prefix='form5'),
                               form7=AddPlaylist(prefix='form7'),
                               friend=getFriend(userid),
                               playlist=getPlaylist(userid),
                               User=User,
                               Comments=getComments(userid),
                               songs=getSong(userid),
                               Recommends=getRecommend(userid),
                               Requests=getRequest(userid),
                               frnd=checkFriend(userid, User),
                               AllComments=getAllComments(userid),
                               AllRecommends=getAllRecommend(userid))


def checkFriend(userid, User):
    # Whether the viewed profile belongs to a friend of the logged-in user
    # (or to the user themselves). Friendships can be stored in either
    # column order, so both directions are checked.
    friendName = []
    g.database.execute("""SELECT User_id2 from friends
        WHERE User_id1="%s" """ % (userid))
    for a in g.database.fetchall():
        friendName.append({'friendid': a[0]})
    for f in friendName:
        a = g.database.execute("""SELECT User_id2 from friends
            WHERE User_id1="%s" and User_id2="%s" """ % (userid, f['friendid']))
        b = g.database.execute("""SELECT User_id2 from friends
            WHERE User_id2="%s" and User_id1="%s" """ % (userid, f['friendid']))
        if a or b:
            return True
        elif userid == f['friendid']:
            return True
        else:
            return False
    return False


def _commentsFor(userid, limit):
    # Shared body for getAllComments()/getComments(); limit is either ""
    # or " LIMIT 5".
    g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s "
                       "ORDER BY Comment_id DESC%s" % (userid, limit))
    commentids = g.database.fetchall()
    retval = []
    for commentid in commentids:
        g.database.execute("SELECT Comment, User_id FROM comments "
                           "WHERE Comment_id=%s", (commentid[0],))
        commentdata = g.database.fetchone()
        data = {}
        data['comment'] = commentdata[0]
        data['userid'] = commentdata[1]
        data['commentid'] = commentid[0]
        g.database.execute("SELECT Username FROM entries WHERE User_id=%s",
                           (data['userid'],))
        data['username'] = g.database.fetchone()[0]
        retval.append(data)
    return retval


def getAllComments(userid):
    return _commentsFor(userid, "")


def getComments(userid):
    # Only the five most recent comments, for the profile page.
    return _commentsFor(userid, " LIMIT 5")


def getFriend(userid):
    # Friendship rows are stored in either column order, so collect both.
    friendName = []
    for column, other in (("User_id1", "User_id2"), ("User_id2", "User_id1")):
        g.database.execute("""SELECT %s from friends
            WHERE %s="%s" """ % (other, column, userid))
        for user in g.database.fetchall():
            g.database.execute("""SELECT Username, User_id, Profile_pic
                from MuShMe.entries WHERE User_id="%s" """ % user[0])
            for a in g.database.fetchall():
                data = {}
                data['friendname'] = a[0]
                data['friendid'] = a[1]
                data['friendpic'] = a[2]
                friendName.append(data)
    return friendName


def getPlaylist(userid):
    playlist = []
    g.database.execute("""SELECT Playlist_name,Playlist_id from MuShMe.playlists
        WHERE User_id="%s" """ % userid)
    for p in g.database.fetchall():
        playlist.append({'pname': p[0], 'pid': p[1]})
    return playlist


def getSong(userid):
    songName = []
    g.database.execute("""SELECT Song_id from MuShMe.user_song
        WHERE User_id=%s LIMIT 5""" % userid)
    for song in g.database.fetchall():
        data = {}
        g.database.execute("""SELECT Song_title,Song_id,Song_Album
            from MuShMe.songs WHERE Song_id="%s" """ % song)
        for a in g.database.fetchall():
            data['songname'] = a[0]
            data['songid'] = a[1]
            # Song_Album holds the album id; resolve it to the album art.
            g.database.execute("SELECT Album_pic FROM albums "
                               "WHERE Album_id=%s " % (a[2]))
            data['art'] = g.database.fetchone()[0]
            songName.append(data)
    return songName


def getUserData(userid):
    User = []
    g.database.execute(""" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB
        from entries where User_id="%s" """ % userid)
    for a in g.database.fetchall():
        data = {}
        data['username'] = a[0]
        data['userid'] = a[1]
        data['profilepic'] = a[2]
        data['privilege'] = a[3]
        data['email'] = a[4]
        data['name'] = a[5]
        data['dob'] = str(a[6])
        User.append(data)
    return User


def _recommendsFor(userid, limit):
    # Shared body for getAllRecommend()/getRecommend(); a recommendation
    # carries either a song or a playlist.
    recommend = []
    g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to
        from recommend where User_id_to="%s"%s """ % (userid, limit))
    for a in g.database.fetchall():
        data = {}
        data['rid'] = a[0]
        data['userfrom'] = a[1]
        data['userto'] = a[2]
        g.database.execute(""" SELECT Username from entries
            where User_id='%s' """ % a[1])
        data['userfromname'] = g.database.fetchone()[0]
        check_song = g.database.execute(""" SELECT Song_id from recommend_songs
            where Recommend_id="%s" """ % a[0])
        if check_song:
            songid = g.database.fetchone()[0]
            data['song'] = []
            g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher
                from songs where Song_id="%s" """ % songid)
            for song in g.database.fetchall():
                d = {}
                d['title'] = song[0]
                d['album'] = song[1]
                d['genre'] = song[2]
                d['publisher'] = song[3]
                d['songid'] = songid
                d['songart'] = getSongArt(songid)
                data['song'].append(d)
        check_playlist = g.database.execute(""" SELECT Playlist_id
            from recommend_playlists where Recommend_id="%s" """ % a[0])
        if check_playlist:
            playlistid = g.database.fetchone()[0]
            data['playlist'] = []
            g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id
                from playlists where Playlist_id="%s" """ % playlistid)
            for p in g.database.fetchall():
                d = {}
                d['pname'] = p[0]
                d['pid'] = p[1]
                g.database.execute(""" SELECT Username, Name,User_id
                    from MuShMe.entries WHERE User_id="%s" """ % p[2])
                for k in g.database.fetchall():
                    d['username'] = k[0]
                    d['uname'] = k[1]
                    d['userid'] = k[2]
                data['playlist'].append(d)
        recommend.append(data)
    return recommend


def getAllRecommend(userid):
    return _recommendsFor(userid, "")


def getRecommend(userid):
    # Only the five most recent recommendations, for the profile page.
    return _recommendsFor(userid, " LIMIT 5")


def getRequest(userid):
    requests = []
    g.database.execute(""" SELECT Request_id,Request_from,Request_to,Status
        from requests where Request_to="%s" """ % userid)
    for a in g.database.fetchall():
        data = {}
        data['reqid'] = a[0]
        data['reqfrom'] = a[1]
        data['reqto'] = a[2]
        data['status'] = a[3]
        data['reqfromuser'] = []
        g.database.execute(""" SELECT User_id,Username,Name from entries
            where User_id='%s' """ % a[1])
        for i in g.database.fetchall():
            d = {}
            d['userid'] = i[0]
            d['username'] = i[1]
            d['name'] = i[2]
            data['reqfromuser'].append(d)
        requests.append(data)
    return requests


def getSongArt(songid):
    # Song_Album holds the album id; resolve it to the album art.
    g.database.execute("SELECT Song_Album FROM songs WHERE song_id=%s",
                       (songid,))
    albumid = g.database.fetchone()[0]
    g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s",
                       (albumid,))
    return g.database.fetchone()[0]
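# --- Hedged example (illustrative; not part of the original module) ------
# getFriend() above issues one entries lookup per friends row, an N+1
# query pattern. Below is a minimal sketch of the same lookup collapsed
# into a single JOIN, assuming the friends/entries schema used above;
# get_friends_joined is a made-up name.
def get_friends_joined(userid):
    g.database.execute(
        """SELECT e.Username, e.User_id, e.Profile_pic
           FROM friends f JOIN entries e ON e.User_id = f.User_id2
           WHERE f.User_id1 = %s""", (userid,))
    return [{'friendname': a[0], 'friendid': a[1], 'friendpic': a[2]}
            for a in g.database.fetchall()]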
@app.route('/user/<userid>/edit', methods=['POST', 'GET'])
def editName(userid):
    if request.method == 'POST':
        if request.form['editname'] != '':
            g.database.execute("""UPDATE MuShMe.entries SET Name=%s
                WHERE User_id=%s """, ([request.form['editname']], userid))
            g.conn.commit()
        if (request.form['birthday_year'] != '0'
                and request.form['birthday_month'] != '0'
                and request.form['birthday_day'] != '0'):
            g.database.execute("""UPDATE MuShMe.entries SET DOB="%s-%s-%s"
                WHERE User_id="%s" """
                % (request.form['birthday_year'],
                   request.form['birthday_month'],
                   request.form['birthday_day'], userid))
            g.conn.commit()
    return redirect(url_for('userProfile', userid=userid))


def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS


@app.route('/user/<userid>/file', methods=['GET', 'POST'])
def upload_file(userid):
    # Profile-picture upload; files land under src/static/img/ProfilePic/.
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            filepath = UPLOAD_FOLDER + filename
            session['profilepic'] = filepath
            g.database.execute("""UPDATE MuShMe.entries SET Profile_pic="%s"
                WHERE User_id="%s" """ % (filepath, userid))
            g.conn.commit()
    return redirect(url_for('userProfile', userid=userid))


app.add_url_rule('/user/uploads/<filename>', 'uploaded_file', build_only=True)
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
    '/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER']
})


@app.route('/user/<rcvrid>.<senderid>/comment', methods=['POST', 'GET'])
def comment(rcvrid, senderid):
    if request.method == 'POST':
        commentform = CommentForm(request.form, prefix='form4')
        if commentform.comment.data:
            query = ("""INSERT INTO MuShMe.comments (comment_type, Comment, User_id)
                VALUES ("%s","%s","%s") """
                % ('U', commentform.comment.data, senderid))
            g.database.execute(query)
            g.conn.commit()
            g.database.execute("""SELECT Comment_id from MuShMe.comments
                WHERE Comment="%s" """ % (commentform.comment.data))
            data = g.database.fetchone()[0]
            enter_comment = g.database.execute(
                """INSERT INTO MuShMe.user_comments (Comment_id, User_id)
                   VALUES ("%s","%s")""" % (data, rcvrid))
            if enter_comment:
                g.conn.commit()
    return redirect(url_for('userProfile', userid=rcvrid))


@app.route('/user/<userid>/<commentid>/report', methods=['POST', 'GET'])
def report(userid, commentid):
    if request.method == 'POST':
        reportform = ReportForm(request.form, prefix='form5')
        check_report = g.database.execute(
            """INSERT INTO MuShMe.complaints
               (Complain_type, Complain_description, Comment_id,reported_by)
               VALUES ("%s","%s","%s","%s") """
            % (reportform.report.data, reportform.other.data, commentid,
               session['userid']))
        if check_report:
            g.conn.commit()
    return redirect(url_for('userProfile', userid=userid))


@app.route('/user/<uidto>.<uidfrom>/request', methods=['POST'])
def sendrequest(uidto, uidfrom):
    if request.method == 'POST':
        if requestvalidate(uidfrom, uidto):
            query = (""" INSERT INTO requests (Request_from,Request_to,Status)
                VALUES ("%s","%s","%s") """ % (uidfrom, uidto, 1))
            g.database.execute(query)
            g.conn.commit()
    return redirect(url_for('userProfile', userid=uidto))


@app.route('/user/<userto>.<userfrom>/accept', methods=['POST'])
def acceptrequest(userto, userfrom):
    # Mark the request handled, then record the friendship.
    if request.method == 'POST':
        query = (""" UPDATE requests SET Status="%s"
            WHERE Request_from="%s" and Request_to="%s" """
            % (0, userfrom, userto))
        g.database.execute(query)
        g.conn.commit()
        query = (""" INSERT INTO friends Values ("%s","%s") """
                 % (userfrom, userto))
        g.database.execute(query)
        g.conn.commit()
    return redirect(url_for('userProfile', userid=userto))


@app.route('/user/<userto>.<userfrom>/reject', methods=['POST'])
def rejectrequest(userto, userfrom):
    if request.method == 'POST':
        query = (""" UPDATE requests SET Status="%s"
            WHERE Request_from="%s" and Request_to="%s" """
            % (-1, userfrom, userto))
        g.database.execute(query)
        g.conn.commit()
    return redirect(url_for('userProfile', userid=userto))


def requestvalidate(userfrom, userto):
    # A new request is blocked only if an earlier one was rejected and the
    # sender is not the recipient.
    check = g.database.execute(""" SELECT Status from requests
        where Request_to="%s" and Request_from="%s" """ % (userto, userfrom))
    if check and g.database.fetchone()[0] == '-1' and userfrom != userto:
        return False
    else:
        return True


@app.route('/search', methods=['POST', 'GET'])
def search():
    if request.method == 'POST':
        searchform = searchForm(prefix='form6')
        # Prefix match across songs, artists, users and playlists.
        value = searchform.entry.data + '%'
        search_fname = []
        search_song = []
        search_friend = []
        search_playlist = []
        search_artist = []
        g.database.execute("""SELECT Song_title,Song_Album,Genre,Publisher,Song_id
            from MuShMe.songs WHERE Song_title LIKE "%s" """ % (value))
        for a in g.database.fetchall():
            data = {}
            data['title'] = a[0]
            data['album'] = a[1]
            data['genre'] = a[2]
            data['publisher'] = a[3]
            data['songid'] = a[4]
            data['art'] = getSongArt(a[4])
            search_song.append(data)
        g.database.execute("""SELECT Artist_name, Artist_id from MuShMe.artists
            WHERE Artist_name LIKE "%s" """ % (value))
        for a in g.database.fetchall():
            search_artist.append({'artistname': a[0], 'artistid': a[1]})
        g.database.execute("""SELECT Username, Name, Profile_pic, User_id
            from MuShMe.entries WHERE Username LIKE "%s" or Name LIKE "%s" """
            % (value, value))
        for a in g.database.fetchall():
            data = {}
            data['username'] = a[0]
            data['name'] = a[1]
            data['profilepic'] = a[2]
            data['userid'] = a[3]
            search_friend.append(data)
        g.database.execute("""SELECT Playlist_name,User_id, Playlist_id
            from MuShMe.playlists WHERE Playlist_name LIKE "%s" """ % (value))
        for a in g.database.fetchall():
            data = {}
            data['pname'] = a[0]
            data['pid'] = a[2]
            g.database.execute(""" SELECT Username, Name from MuShMe.entries
                WHERE User_id="%s" """ % a[1])
            for k in g.database.fetchall():
                data['username'] = k[0]
                data['uname'] = k[1]
            search_playlist.append(data)
        length = (len(search_playlist) + len(search_song) + len(search_friend)
                  + len(search_artist) + len(search_fname))
        return render_template('searchpage/search.html',
                               entry=searchform.entry.data,
                               form6=searchForm(prefix='form6'),
                               search_song=search_song,
                               search_artist=search_artist,
                               friends=search_friend,
                               search_playlist=search_playlist,
                               length=length)
    else:
        return render_template('searchpage/search.html',
                               form6=searchForm(prefix='form6'))


@app.route('/user/<userid>/addplaylist', methods=['POST'])
def addplaylist(userid):
    if request.method == 'POST':
        addplaylistform = AddPlaylist(prefix='form7')
        g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id)
            VALUES ("%s","%s")""" % (addplaylistform.add.data, userid))
        g.conn.commit()
    return redirect(url_for('userProfile', userid=userid))


@app.route("/playlist/<userid>/deleteplaylist", methods=["POST"])
def deleteplaylist(userid):
    playlist = request.form.getlist('playlistselect')
    for playlistid in playlist:
        g.database.execute("""DELETE FROM playlists
            WHERE Playlist_id=%s and User_id=%s """ % (playlistid, userid))
        g.conn.commit()
    return redirect(url_for('userProfile', userid=userid))


# All your profile are belong to us.
@app.route('/artist/<artistid>')
def artistProfile(artistid):
    return render_template('artistpage/index.html',
                           form6=searchForm(prefix='form6'))


# To handle 404 not found errors
@app.errorhandler(404)
def page_not_found_error(error):
    return render_template('error.html'), 404


@app.route('/termsofservices')
def tos():
    return render_template('tos.html')


@app.route('/about')
def about():
    return render_template('about.html')


@app.route('/changepwd')
def changepwd():
    return render_template('changepwd.html')


@app.route('/logout')
def logout():
    if 'email' not in session:
        return render_template('error.html')
    session['logged_in'] = False
    return render_template('login.html')


if not app.debug:
    # Mail errors to the admins and keep a warning log on disk.
    import logging
    from logging.handlers import SMTPHandler
    from logging import FileHandler, Formatter
    mail_handler = SMTPHandler('127.0.0.1', '<EMAIL>',
                               app.config['DEFAULT_MAIL_SENDER'],
                               'YourApplication Failed')
    mail_handler.setLevel(logging.ERROR)
    mail_handler.setFormatter(Formatter('''
    Message type: %(levelname)s
    Location: %(pathname)s:%(lineno)d
    Module: %(module)s
    Function: %(funcName)s
    Time: %(asctime)s
    Message: %(message)s
    '''))
    app.logger.addHandler(mail_handler)
    file_handler = FileHandler('log.txt')
    file_handler.setLevel(logging.WARNING)
    app.logger.addHandler(file_handler)


if __name__ == '__main__':
    # To allow aptana to receive errors, set use_debugger=False
    app = create_app(config="config.yaml")

    if app.debug:
        use_debugger = True
    try:
        # Disable Flask's debugger if external debugger is requested
        use_debugger = not(app.config.get('DEBUG_WITH_APTANA'))
    except:
        pass
    app.run(use_debugger=use_debugger, use_reloader=use_debugger,
            threaded=True)
"d['pid']=p[1] g.database.execute(\"\"\" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "%(message)s ''')) if __name__ == \"\"\"__main__\"\"\": # To allow aptana", "playlist.append(data) return playlist def getSong(userid): songName = [] g.database.execute(\"\"\"SELECT Song_id", "\"\"\" % userid) for a in g.database.fetchall(): data={} data['reqid']=a[0] data['reqfrom']", "from entries where User_id='%s' \"\"\" % a[1]) for i in", "g.database.execute(\"\"\"SELECT Username, User_id from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % user[0])", "contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit() g.database.execute(\"\"\"SELECT User_id from MuShMe.entries", "Comment_id,reported_by) VALUES (\"%s\",\"%s\",\"%s\",\"%s\") \"\"\" % (reportform.report.data, reportform.other.data, commentid, session['userid'] ))", "d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid d['songart']", "LIKE \"%s\" \"\"\" % ( value )) for a in", "Artist_name LIKE \"%s\" \"\"\" % ( value )) for a", "'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' + UPLOAD_FOLDER @app.route('/') def index(): session[\"login\"]", "== 'POST': file = request.files['file'] if file and allowed_file(file.filename): filename", "Privilege FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['privilege'] =", "session[\"signup\"] = True session[\"login\"] = False contactform = ContactForm(request.form, prefix='form2')", "Name LIKE \"%s\" \"\"\" % ( value, value )) for", "email) name = g.database.execute(\"\"\" SELECT * from MuShMe.entries where Username=\"%s\"", "= commentid[0] g.database.execute(\"SELECT Username FROM entries WHERE User_id=%s\", (data['userid'])) data['username']", "to us. 
@app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle 404", "for user in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username, User_id,", "if validate(contactform.email.data,contactform.username.data): check_signup = g.database.execute(\"\"\"INSERT into MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES (\"%s\",\"%s\",\"%s\",\"%s\")\"\"\"", "User_id='%s' \"\"\" % a[1]) for i in g.database.fetchall(): d={} d['userid']", "logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging", "create_app(config=\"\"\"config.yaml\"\"\") if app.debug: use_debugger = True try: # Disable Flask's", "in playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s \"\"\"", "= song[3] d['songid'] = songid data['song'].append(d) check_playlist = g.database.execute(\"\"\" SELECT", "{} data['comment'] = commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0]", "check_report = g.database.execute(\"\"\"INSERT INTO MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES (\"%s\",\"%s\",\"%s\",\"%s\")", "errors @app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'), 404 @app.route('/termsofservices') def tos():", "Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest())) user_id = g.database.fetchone()", "g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else: return True @app.route('/search',methods=['POST','GET']) def", "SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def comment(rcvrid, senderid):", "reportform.other.data, commentid, session['userid'] )) if check_report == True: g.conn.commit() return", "(commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment = g.database.execute(\"\"\"INSERT INTO", "= a[1] data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] = [] g.database.execute(\"\"\" SELECT User_id,Username,Name", "(userid)) g.conn.commit() for uid in userid: session['userid'] = uid g.database.execute(\"\"\"SELECT", "(data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id FROM MuShMe.user_comments WHERE Comment_id=\"%s\"", "import artist import pymysql import hashlib from flask import g", "field, errors in form.errors.items(): for error in errors: flash(u\"Error in", "d={} d['userid'] = i[0] d['username'] = i[1] d['name'] = i[2]", "print data['userfromname'] check_song = g.database.execute(\"\"\" SELECT Song_id from recommend_songs where", "\"\"\" % uid ) session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE", "@app.route('/logout') def logout(): if 'email' not in session: return render_template('error.html')", "\"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print", "MuShMe.entries WHERE User_id=\"%s\" \"\"\" % user[0]) for a in g.database.fetchall():", "Email-Id or Password\") return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index')))", "check_friend = g.database.execute(\"\"\"SELECT Username, Name, 
Profile_pic, User_id from MuShMe.entries WHERE", "songName = [] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT", "Song_id=\"%s\" \"\"\" % song) for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1]", "== False: return render_template('error.html'), 404 else: if request.method == 'GET':", "return redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect Email-Id or Password\") else: flash(\"Incorrect", "import app import os import shutil from flask import Flask,", "userfrom!=userto: return False else: return True @app.route('/search',methods=['POST','GET']) def search(): if", "if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else: return", "return redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\", methods=[\"POST\"]) def deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for", "flash(u\"Error in the %s field - %s\" % ( getattr(form,", "Pwdhash=\"%s\" \"\"\" % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone() g.database.execute(\"\"\"UPDATE", "session['logged_in']=True #print uid #print userid return redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect", "form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid))", "== 'POST': commentform = CommentForm(request.form, prefix='form4') #print senderid #print rcvrid", "else: return redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if request.method ==", "searchform = searchForm(prefix='form6') #print 'f' value = searchform.entry.data + '%'", "SELECT Username, Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % a[1])", "User_id=\"%s\" \"\"\" % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic FROM", "\" % (a[2])) g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data) return songName", "and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER", "#print userid return redirect(url_for('userProfile', userid=uid)) else: flash(\"Incorrect Email-Id or Password\")", "\"\"\"__main__\"\"\": # To allow aptana to receive errors, set use_debugger=False", "return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method == 'POST':", "WHERE User_id=\"%s\" \"\"\" % user[0]) for a in g.database.fetchall(): data['friendname']=a[0]", "search_friend.append(data) check_playlist = g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE Playlist_name", "request.append(data) return request def getSongArt(songid): g.database.execute(\"SELECT Song_Album FROM songs WHERE", "WHERE User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] != '0'", "for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] 
friendName.append(data) g.database.execute(\"\"\"SELECT User_id1", "= song[2] d['publisher'] = song[3] d['songid'] = songid d['songart'] =", "VALUES (\"%s\",\"%s\",\"%s\",\"%s\")\"\"\" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup: g.conn.commit()", "g.database.fetchone()[0] data['playlist'] = [] g.database.execute(\"\"\" SELECT Playlist_name,Playlist_id,User_id from playlists where", "WHERE User_id=\"%s\" \"\"\" % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_pic", "user_comments WHERE User_id=%s ORDER BY Comment_id DESC\" % (userid)) commentids", "session[\"signup\"] = False if request.method == 'POST': loginform = LoginForm(request.form,", "Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" \"\"\" % userid) for a", "data['userid']=a[3] search_friend.append(data) check_playlist = g.database.execute(\"\"\"SELECT Playlist_name,User_id, Playlist_id from MuShMe.playlists WHERE", "friendName return friendName def getPlaylist(userid): playlist = [] g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id", "Recommend_id=\"%s\" \"\"\" % a[0]) if check_song: songid = g.database.fetchone()[0] data['song']", "AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute(\"\"\"SELECT User_id2 from friends", "@app.before_request def before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe',", "= {} data['pname']=a[0] data['pid']=a[2] g.database.execute(\"\"\" SELECT Username, Name from MuShMe.entries", "for i in g.database.fetchall(): d={} d['userid'] = i[0] d['username'] =", "from friends WHERE User_id1=\"%s\" \"\"\" % userid) for user in", "allow aptana to receive errors, set use_debugger=False app = create_app(config=\"\"\"config.yaml\"\"\")", "recommend where User_id_to=\"%s\" LIMIT 5 \"\"\" % userid) for a", "in g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def", "handle 404 not found errors @app.errorhandler(404) def page_not_found_error(error): return render_template('error.html'),", "g.database.execute(\"SELECT Album_pic FROM albums WHERE Album_id=%s \" % (a[2])) g.conn.commit()", "return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if session['logged_in'] == False: return", "g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(\"\"\" SELECT Username", "import g mail = Mail() mail.init_app(app) #For the collector script.", "g.database.fetchall(): data = {} data['pname']=a[0] data['pid']=a[2] g.database.execute(\"\"\" SELECT Username, Name", "False session[\"logged_in\"] = False return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database", "SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id=\"%s\" \"\"\" % playlistid) for", "User_id2 from friends WHERE User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid']))", "from recommend_songs where Recommend_id=\"%s\" \"\"\" % a[0]) if check_song: songid", "% songid) for song in g.database.fetchall(): d = {} d['title']=song[0]", "MuShMe.entries SET DOB=\"%s-%s-%s\" WHERE User_id=\"%s\" \"\"\" % 
(request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit()", "g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid )", "import Flask, render_template, session, request, flash, url_for, redirect from Forms", "Username=\"%s\" \"\"\" % username) if email or name: return False", "% (commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment = g.database.execute(\"\"\"INSERT", "\"\"\" % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone() g.database.execute(\"\"\"UPDATE MuShMe.entries", "songid d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist = g.database.execute(\"\"\" SELECT Playlist_id", "Profile_pic from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % user[0]) for a", "@app.route('/') def index(): session[\"login\"] = False session[\"signup\"] = False session[\"logged_in\"]", "data) #print g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if", "error in errors: flash(u\"Error in the %s field - %s\"", "(userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return False else:", "WHERE Song_title LIKE \"%s\" \"\"\" % ( value )) for", "redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\", methods=[\"POST\"]) def deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for playlistid", "MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) newPlaylist =", "script. app.register_blueprint(API); #For the songs app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist);", "WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from", "@app.route('/termsofservices') def tos(): return render_template('tos.html') @app.route('/about') def about(): return render_template('about.html')", "return retval def getComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s", "from MuShMe.playlists WHERE Playlist_name LIKE \"%s\" \"\"\" % ( value", "% a[0]) if check_song: songid = g.database.fetchone()[0] data['song'] = []", "MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] =", "database connections. 
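# Every handler below builds SQL by interpolating request data straight into
# the query string, which is open to SQL injection and breaks on values
# containing quotes. A minimal sketch of the same kind of lookup using
# pymysql's parameter binding instead (the helper name fetch_username is
# hypothetical and not used elsewhere in this app):
def fetch_username(uid):
    # The driver escapes uid and substitutes it for %s itself.
    g.database.execute("SELECT Username FROM MuShMe.entries WHERE User_id=%s", (uid,))
    row = g.database.fetchone()
    return row[0] if row else None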
@app.route('/login', methods=['POST'])
def login():
    session["login"] = True
    session["signup"] = False
    if request.method == 'POST':
        loginform = LoginForm(request.form, prefix='form1')
        if loginform.validate_on_submit():
            check_login = g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest()))
            if check_login:
                userid= g.database.fetchone()
                g.database.execute("""UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id="%s" """ % (userid))
                g.conn.commit()
                for uid in userid:
                    session['userid'] = uid
                    g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session['UserName']=g.database.fetchone()[0]
                    g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid)
                    session['privilege'] = g.database.fetchone()[0]
                    g.database.execute("""SELECT Profile_Pic FROM MuShMe.entries WHERE User_id="%s" """ % uid)
                    session['profilepic'] = g.database.fetchone()[0]
                    g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session["Name"]=g.database.fetchone()
                    g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session["dob"]=str(g.database.fetchone())
                    session['logged_in'] = True
                    session['logged_in']=True
                    #print uid
                    #print userid
                return redirect(url_for('userProfile', userid=uid))
            else:
                flash("Incorrect Email-Id or Password")
        else:
            flash("Incorrect Email-Id or Password")
        return render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2'))
    else:
        return redirect(url_for(('index')))

def flash_errors(form):
    for field, errors in form.errors.items():
        for error in errors:
            flash(u"Error in the %s field - %s" % ( getattr(form, field).label.text, error ))

@app.route('/signup', methods=['POST'])
def signup():
    session["signup"] = True
    session["login"] = False
    contactform = ContactForm(request.form, prefix='form2')
    if contactform.validate_on_submit():
        if validate(contactform.email.data,contactform.username.data):
            check_signup = g.database.execute("""INSERT into MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES ("%s","%s","%s","%s")""" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, ))
            if check_signup:
                g.conn.commit()
                g.database.execute("""SELECT User_id from MuShMe.entries WHERE Email_id="%s" AND Pwdhash="%s" """ % (contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest()))
                user_id = g.database.fetchone()
                for uid in user_id:
                    session['userid'] = uid
                    g.database.execute("""SELECT Username from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session['UserName']=g.database.fetchone()[0]
                    g.database.execute("""SELECT Privilege FROM MuShMe.entries WHERE User_id="%s" """ % uid)
                    session['privilege'] = g.database.fetchone()[0]
                    g.database.execute("""SELECT Profile_pic FROM MuShMe.entries WHERE User_id="%s" """ % uid)
                    session['profilepic'] = g.database.fetchone()[0]
                    g.database.execute("""SELECT Name from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session["Name"]=g.database.fetchone()
                    g.database.execute("""SELECT DOB from MuShMe.entries WHERE User_id="%s" """ % uid )
                    session["dob"]=str(g.database.fetchone())
                    newPlaylist = session['UserName'] + ' default collection'
                    g.database.execute("""INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES ("%s","%s")""" % (newPlaylist,uid))
                    g.conn.commit()
                return redirect(url_for('userProfile',userid=uid))
            else:
                flash("Please enter valid data !")
        else:
            flash("Username or Email-Id already exists")
    else:
        flash_errors(contactform)
    return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform)

def validate(email,username):
    email = g.database.execute(""" SELECT * from MuShMe.entries where Email_id="%s" """ % email)
    name = g.database.execute(""" SELECT * from MuShMe.entries where Username="%s" """ % username)
    if email or name:
        return False
    else:
        return True
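# login() and signup() above store and compare a bare, unsalted SHA-1 of the
# password. A minimal sketch of salted hashing with werkzeug.security
# (werkzeug is already a dependency here); the helper names are hypothetical,
# and the stored value would still go in the Pwdhash column:
from werkzeug.security import generate_password_hash, check_password_hash

def make_pwdhash(password):
    # Embeds a random salt and the hash method in the stored string.
    return generate_password_hash(password)

def verify_pwdhash(stored_hash, password):
    return check_password_hash(stored_hash, password)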
@app.route('/user/<userid>',methods=['GET'])
def userProfile(userid):
    if session['logged_in'] == False:
        return render_template('error.html'), 404
    else:
        if request.method == 'GET':
            User=getUserData(userid)
            return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid))

def checkFriend(userid,User):
    friendName =[]
    g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % userid)
    for user in g.database.fetchall():
        data = {}
        g.database.execute("""SELECT Username, User_id from MuShMe.entries WHERE User_id="%s" """ % user[0])
        for a in g.database.fetchall():
            data['friendname']=a[0]
            data['friendid']=a[1]
            friendName.append(data)
    for f in friendName:
        a=g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" and User_id2="%s" """ % (userid,f['friendid']))
        b=g.database.execute("""SELECT User_id2 from friends WHERE User_id2="%s" and User_id1="%s" """ % (userid,f['friendid']))
        if a or b:
            return True
        elif userid == f['friendid']:
            return True
        else:
            return False

def getAllComments(userid):
    g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC" % (userid))
    commentids = g.database.fetchall()
    retval = []
    for commentid in commentids:
        g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0]))
        commentdata = g.database.fetchone()
        data = {}
        data['comment'] = commentdata[0]
        data['userid'] = commentdata[1]
        data['commentid'] = commentid[0]
        g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid']))
        data['username'] = g.database.fetchone()[0]
        retval.append(data)
    return retval

def getComments(userid):
    g.database.execute("SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5" % (userid))
    commentids = g.database.fetchall()
    retval = []
    for commentid in commentids:
        g.database.execute("SELECT Comment, User_id FROM comments WHERE Comment_id=%s", (commentid[0]))
        commentdata = g.database.fetchone()
        data = {}
        data['comment'] = commentdata[0]
        data['userid'] = commentdata[1]
        data['commentid'] = commentid[0]
        g.database.execute("SELECT Username FROM entries WHERE User_id=%s", (data['userid']))
        data['username'] = g.database.fetchone()[0]
        retval.append(data)
    return retval

def getFriend(userid):
    friendName =[]
    g.database.execute("""SELECT User_id2 from friends WHERE User_id1="%s" """ % (userid))
    for user in g.database.fetchall():
        data = {}
        g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0])
        for a in g.database.fetchall():
            data['friendname']=a[0]
            data['friendid']=a[1]
            data['friendpic']=a[2]
            friendName.append(data)
    g.database.execute("""SELECT User_id1 from friends WHERE User_id2="%s" """ % (userid))
    for user in g.database.fetchall():
        data = {}
        g.database.execute("""SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id="%s" """ % user[0])
        for a in g.database.fetchall():
            data['friendname']=a[0]
            data['friendid']=a[1]
            data['friendpic']=a[2]
            friendName.append(data)
    print friendName
    return friendName

def getPlaylist(userid):
    playlist = []
    g.database.execute("""SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE User_id="%s" """ % userid)
    for p in g.database.fetchall():
        data = {}
        data['pname']=p[0]
        data['pid']=p[1]
        playlist.append(data)
    return playlist

def getSong(userid):
    songName = []
    g.database.execute("""SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5""" % userid)
    for song in g.database.fetchall():
        data = {}
        g.database.execute("""SELECT Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id="%s" """ % song)
        for a in g.database.fetchall():
            data['songname']=a[0]
            data['songid']=a[1]
            g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s " % (a[2]))
            g.conn.commit()
            data['art'] = g.database.fetchone()[0]
            songName.append(data)
    return songName

def getUserData(userid):
    User = []
    g.database.execute(""" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where User_id="%s" """ % userid)
    for a in g.database.fetchall():
        data={}
        data['username']=a[0]
        data['userid']=a[1]
        data['profilepic'] = a[2]
        data['privilege']=a[3]
        data['email']=a[4]
        data['name']=a[5]
        data['dob']=str(a[6])
        User.append(data)
    return User

def getAllRecommend(userid):
    recommend =[]
    g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" """ % userid)
    for a in g.database.fetchall():
        data={}
        data['rid']=a[0]
        data['userfrom'] = a[1]
        data['userto']=a[2]
        g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1])
        data['userfromname'] = g.database.fetchone()[0]
        print data['userfromname']
        check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0])
        if check_song:
            songid = g.database.fetchone()[0]
            data['song'] = []
            g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid)
            for song in g.database.fetchall():
                d = {}
                d['title']=song[0]
                d['album'] = song[1]
                d['genre'] = song[2]
                d['publisher'] = song[3]
                d['songid'] = songid
                d['songart'] = getSongArt(songid)
                data['song'].append(d)
        check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0])
        if check_playlist:
            playlistid = g.database.fetchone()[0]
            data['playlist'] = []
            g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid)
            for p in g.database.fetchall():
                d= {}
                d['pname']=p[0]
                d['pid']=p[1]
                g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2])
                for k in g.database.fetchall():
                    d['username']=k[0]
                    d['uname']=k[1]
                    d['userid']=k[2]
                    data['playlist'].append(d)
        recommend.append(data)
    return recommend

def getRecommend(userid):
    recommend =[]
    g.database.execute(""" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to="%s" LIMIT 5 """ % userid)
    for a in g.database.fetchall():
        data={}
        data['rid']=a[0]
        data['userfrom'] = a[1]
        data['userto']=a[2]
        g.database.execute(""" SELECT Username from entries where User_id='%s' """ % a[1])
        data['userfromname'] = g.database.fetchone()[0]
        check_song = g.database.execute(""" SELECT Song_id from recommend_songs where Recommend_id="%s" """ % a[0])
        if check_song:
            songid = g.database.fetchone()[0]
            data['song'] = []
            g.database.execute(""" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id="%s" """ % songid)
            for song in g.database.fetchall():
                d = {}
                d['title']=song[0]
                d['album'] = song[1]
                d['genre'] = song[2]
                d['publisher'] = song[3]
                d['songid'] = songid
                data['song'].append(d)
        check_playlist = g.database.execute(""" SELECT Playlist_id from recommend_playlists where Recommend_id="%s" """ % a[0])
        if check_playlist:
            playlistid = g.database.fetchone()[0]
            data['playlist'] = []
            g.database.execute(""" SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id="%s" """ % playlistid)
            for p in g.database.fetchall():
                d= {}
                d['pname']=p[0]
                d['pid']=p[1]
                g.database.execute(""" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id="%s" """ % p[2])
                for k in g.database.fetchall():
                    d['username']=k[0]
                    d['uname']=k[1]
                    d['userid']=k[2]
                    data['playlist'].append(d)
        recommend.append(data)
    return recommend

def getRequest(userid):
    request =[]
    g.database.execute(""" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to="%s" """ % userid)
    for a in g.database.fetchall():
        data={}
        data['reqid']=a[0]
        data['reqfrom'] = a[1]
        data['reqto']=a[2]
        data['status']=a[3]
        data['reqfromuser'] = []
        g.database.execute(""" SELECT User_id,Username,Name from entries where User_id='%s' """ % a[1])
        for i in g.database.fetchall():
            d={}
            d['userid'] = i[0]
            d['username'] = i[1]
            d['name'] = i[2]
            data['reqfromuser'].append(d)
        print data
        request.append(data)
    return request

def getSongArt(songid):
    g.database.execute("SELECT Song_Album FROM songs WHERE song_id=%s", (songid))
    albumname = g.database.fetchone()[0]
    g.database.execute("SELECT Album_pic FROM albums WHERE Album_id=%s", (albumname))
    return g.database.fetchone()[0]

@app.route('/user/<userid>/edit',methods=['POST','GET'])
def editName(userid):
    if request.method == 'POST':
        uid = userid
        print request.form
        if request.form['editname'] != '':
            g.database.execute("""UPDATE MuShMe.entries SET Name=%s WHERE User_id=%s """, ([request.form['editname']], userid))
            g.conn.commit()
        if request.form['birthday_year'] != '0' and request.form['birthday_month'] != '0' and request.form['birthday_day'] != '0':
            g.database.execute("""UPDATE MuShMe.entries SET DOB="%s-%s-%s" WHERE User_id="%s" """ % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid))
            g.conn.commit()
        return redirect(url_for('userProfile',userid=userid))
    else:
        return redirect(url_for('userProfile', userid=userid))

def allowed_file(filename):
    return '.' in filename and \
        filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS

@app.route('/user/<userid>/file', methods=['GET', 'POST'])
def upload_file(userid):
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            filename = secure_filename(file.filename)
            file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
            filepath = UPLOAD_FOLDER + filename
            session['profilepic'] = filepath
            g.database.execute("""UPDATE MuShMe.entries SET Profile_pic="%s" WHERE User_id="%s" """ % (filepath, userid))
            g.conn.commit()
            return redirect(url_for('userProfile', userid=userid))

app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True)
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] })

@app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET'])
def comment(rcvrid, senderid):
    if request.method == 'POST':
        commentform = CommentForm(request.form, prefix='form4')
        #print senderid
        #print rcvrid
        if commentform.comment.data:
            query = ("""INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES ("%s","%s","%s") """ % ('U',commentform.comment.data, senderid))
            print query
            g.database.execute(query)
            g.conn.commit()
            g.database.execute("""SELECT Comment_id from MuShMe.comments WHERE Comment="%s" """ % (commentform.comment.data))
            data = g.database.fetchone()[0]
            #print data
            enter_comment = g.database.execute("""INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES ("%s","%s")""" % (data,rcvrid))
            if enter_comment:
                g.conn.commit()
                g.database.execute("""SELECT User_id FROM MuShMe.user_comments WHERE Comment_id="%s" """ % data)
                #print g.database.fetchone()[0]
        return redirect(url_for('userProfile', userid=rcvrid))
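# comment() above has to re-query MuShMe.comments by comment text to recover
# the new Comment_id, which picks an arbitrary row once two identical comments
# exist. A minimal sketch of a safer insert (hypothetical helper, not wired
# into the route): pymysql exposes cursor.lastrowid after an INSERT into an
# AUTO_INCREMENT column, assuming Comment_id is AUTO_INCREMENT here.
def insert_comment(comment_type, comment, sender_id):
    g.database.execute("INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES (%s, %s, %s)", (comment_type, comment, sender_id))
    g.conn.commit()
    # Id of the row just inserted; no second SELECT needed.
    return g.database.lastrowid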
(userid)) for user in g.database.fetchall(): data = {}", "in g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username, User_id from MuShMe.entries", "rejectrequest(userto,userfrom): if request.method == 'POST': query=(\"\"\" UPDATE requests SET Status=\"%s\"", "Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (0,userfrom,userto)) g.database.execute(query) g.conn.commit()", "email = g.database.execute(\"\"\" SELECT * from MuShMe.entries where Email_id=\"%s\" \"\"\"", "g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe', charset='utf8') g.database =", "for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1] g.database.execute(\"SELECT Album_pic FROM albums", "recommend def getRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend", "MuShMe.playlists WHERE User_id=\"%s\" \"\"\" % userid) for p in g.database.fetchall():", "return render_template('about.html') @app.route('/changepwd') def changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout():", "session['userid'] )) if check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid))", "friendName.append(data) print friendName return friendName def getPlaylist(userid): playlist = []", "VALUES (\"%s\",\"%s\")\"\"\" % (addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\", methods=[\"POST\"]) def", "are belong to us. @app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To", "app.config['DEFAULT_MAIL_SENDER'], 'YourApplication Failed') mail_handler.setLevel(logging.ERROR) app.logger.addHandler(mail_handler) from logging import FileHandler file_handler", "in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1] data['userto']=a[2] g.database.execute(\"\"\" SELECT", "def getUserData(userid): User = [] g.database.execute(\"\"\" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries", "getUserData(userid): User = [] g.database.execute(\"\"\" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from entries where", "songName.append(data) return songName def getUserData(userid): User = [] g.database.execute(\"\"\" SELECT", "\"\"\" % songid) for song in g.database.fetchall(): d = {}", "userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(\"\"\" SELECT Status from requests", "url_for, redirect from Forms import ContactForm, LoginForm, editForm, ReportForm, CommentForm,", "WHERE User_id=\"%s\" \"\"\" % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else:", "FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid) session['profilepic'] = g.database.fetchone()[0]", "songid) for song in g.database.fetchall(): d = {} d['title']=song[0] d['album']", "= searchform.entry.data + '%' search_fname = [] search_song= [] search_friend", "redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method == 'POST': reportform", "g.database.fetchone()[0] #print data enter_comment = g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id)", 
"(reportform.report.data, reportform.other.data, commentid, session['userid'] )) if check_report == True: g.conn.commit()", "redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if", "= length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST':", "d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid data['song'].append(d)", "g.database.fetchall(): data={} data['username']=a[0] data['userid']=a[1] data['profilepic'] = a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5]", "Request_to=\"%s\" \"\"\" % userid) for a in g.database.fetchall(): data={} data['reqid']=a[0]", "SELECT * from MuShMe.entries where Username=\"%s\" \"\"\" % username) if", "g.database.execute(\"\"\"SELECT Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id=\"%s\" \"\"\" % song) for", "g.database.execute(\"\"\" SELECT Playlist_id from recommend_playlists where Recommend_id=\"%s\" \"\"\" % a[0])", "enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id FROM MuShMe.user_comments WHERE Comment_id=\"%s\" \"\"\" %", "Username from entries where User_id='%s' \"\"\" % a[1]) data['userfromname'] =", "data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute(\"SELECT Username FROM entries", "User_id1=\"%s\" \"\"\" % (userid)) for user in g.database.fetchall(): data =", "return True else: return False def getAllComments(userid): g.database.execute(\"SELECT Comment_id FROM", "+ '%' search_fname = [] search_song= [] search_friend = []", "LIMIT 5\" % (userid)) commentids = g.database.fetchall() retval = []", "(albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if request.method == 'POST':", "return True @app.route('/search',methods=['POST','GET']) def search(): if request.method == 'POST': searchform", "friends WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2", "from friends WHERE User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT", "g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" \"\"\" % userid)", "userid) for a in g.database.fetchall(): data={} data['rid']=a[0] data['userfrom'] = a[1]", "+ len(search_friend) + len(search_artist) + len(search_fname) return render_template('searchpage/search.html', entry=searchform.entry.data,form6=searchForm(prefix='form6'), search_song=search_song,", "g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (data,rcvrid)) if", "= g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "=[] g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" \"\"\" % userid)", "Comment, User_id) VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % ('U',commentform.comment.data, senderid)) print query", "#For the songs app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist); #for the", "from requests where Request_to=\"%s\" and Request_from=\"%s\" \"\"\" % (userfrom,userto)) if", "Album_pic FROM albums WHERE Album_id=%s \" % (a[2])) g.conn.commit() 
data['art']", "MuShMe.entries WHERE User_id=\"%s\" \"\"\" % p[2]) for k in g.database.fetchall():", "import hashlib from flask import g mail = Mail() mail.init_app(app)", "form.errors.items(): for error in errors: flash(u\"Error in the %s field", "commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0] data['userid']", "\"\"\" % ( value )) for a in g.database.fetchall(): data", "from werkzeug import secure_filename from werkzeug import SharedDataMiddleware from api", "MuShMe.entries where Email_id=\"%s\" \"\"\" % email) name = g.database.execute(\"\"\" SELECT", "werkzeug import secure_filename from werkzeug import SharedDataMiddleware from api import", "commentform = CommentForm(request.form, prefix='form4') #print senderid #print rcvrid if commentform.comment.data:", "name: return False else: return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if", "from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in']", "a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] friendName.append(data) for f in friendName:", "@app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute(\"\"\"INSERT INTO", "MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if", "if contactform.validate_on_submit(): if validate(contactform.email.data,contactform.username.data): check_signup = g.database.execute(\"\"\"INSERT into MuShMe.entries (Username,Email_id,Pwdhash,Name)", "else: return False g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\"", "g.database.execute(\"\"\"UPDATE MuShMe.entries SET Name=%s WHERE User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit()", "User_id, Profile_pic from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % user[0]) for", "= (\"\"\"INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES (\"%s\",\"%s\",\"%s\") \"\"\"", "valid data !\") else: flash(\"Username or Email has been taken\")", "[] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\" %", "= g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE \"%s\" \"\"\"", "DOB from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone())", "g.database.execute(\"\"\" SELECT * from MuShMe.entries where Username=\"%s\" \"\"\" % username)", "data = g.database.fetchone()[0] #print data enter_comment = g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments", "g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\" % userid) for", "Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute(\"\"\"SELECT User_id2 from", "'%' search_fname = [] search_song= [] search_friend = [] search_playlist", "session[\"dob\"]=str(g.database.fetchone()) newPlaylist = session['UserName'] + ' default collection' g.database.execute(\"\"\"INSERT INTO", "a[1]) for i in g.database.fetchall(): d={} d['userid'] = i[0] d['username']", "name = g.database.execute(\"\"\" SELECT * from MuShMe.entries where Username=\"%s\" \"\"\"", "data['playlist'] = [] g.database.execute(\"\"\" 
SELECT Playlist_name,Playlist_id,User_id from playlists where Playlist_id=\"%s\"", "search_playlist =[] search_artist = [] check_song = g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from", "= request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE", "def userProfile(userid): if session['logged_in'] == False: return render_template('error.html'), 404 else:", "session['logged_in']=False return render_template('login.html') if not app.debug: import logging from logging.handlers", "WHERE Album_id=%s \" % (a[2])) g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data)", "session['UserName']=g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Privilege FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid)", "playlistid = g.database.fetchone()[0] data['playlist'] = [] g.database.execute(\"\"\" SELECT Playlist_name,Playlist_id,User_id from", "data !\") else: flash(\"Username or Email has been taken\") else:", "g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) g.database.execute(\"\"\"SELECT User_id1 from friends WHERE", "a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User def getAllRecommend(userid):", "g.database.fetchall(): d['username']=k[0] d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRequest(userid):", "% uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic FROM MuShMe.entries WHERE", "albums WHERE Album_id=%s\", (albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if", "def changepwd(): return render_template('changepwd.html') @app.route('/logout') def logout(): if 'email' not", "addplaylist(userid): if request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name,", "i[1] d['name'] = i[2] data['reqfromuser'].append(d) print data request.append(data) return request", "(commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] = commentdata[0]", "render_template, session, request, flash, url_for, redirect from Forms import ContactForm,", "User_id=\"%s\" \"\"\" % uid) session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True", "deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for playlistid in playlist: g.database.execute(\"\"\"DELETE FROM", "userid=userid)) def allowed_file(filename): return '.' 
in filename and \\ filename.rsplit('.',", "% user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data)", "return friendName def getPlaylist(userid): playlist = [] g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id from", "return User def getAllRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from", "% a[1]) for i in g.database.fetchall(): d={} d['userid'] = i[0]", "else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if request.method=='POST': addplaylistform =", "= g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic FROM albums WHERE Album_id=%s\", (albumname)) return", "len(search_playlist) + len(search_song) + len(search_friend) + len(search_artist) + len(search_fname) return", "User_id=\"%s\" \"\"\" % a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1]", "Song_title,Song_id,Song_Album from MuShMe.songs WHERE Song_id=\"%s\" \"\"\" % song) for a", "pymysql import hashlib from flask import g mail = Mail()", "if a or b: return True elif userid == f['friendid']:", "User_id) VALUES (\"%s\",\"%s\")\"\"\" % (data,rcvrid)) if enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id", "\"\"\" % userid) for p in g.database.fetchall(): data = {}", "SELECT Username, Name,User_id from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % p[2])", "flask import Flask, render_template, session, request, flash, url_for, redirect from", "g.database.execute(\"\"\"SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES", "hashlib.sha1(contactform.password.data).hexdigest())) user_id = g.database.fetchone() for uid in user_id: session['userid'] =", "\"\"\" % a[1]) for i in g.database.fetchall(): d={} d['userid'] =", "redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def acceptrequest(userto,userfrom): if request.method == 'POST': query=(\"\"\"", "Song_Album FROM songs WHERE song_id=%s\", (songid)) albumname = g.database.fetchone()[0] g.database.execute(\"SELECT", "(playlistid, userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your profile are belong", "app.register_blueprint(SONG); #For the playlist app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin);", "User.append(data) return User def getAllRecommend(userid): recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to", "Name=%s WHERE User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year'] !=", "!= '0' and request.form['birthday_day'] != '0': g.database.execute(\"\"\"UPDATE MuShMe.entries SET DOB=\"%s-%s-%s\"", "data['uname']=k[1] search_playlist.append(data) length = len(search_playlist) + len(search_song) + len(search_friend) +", ")) if check_report == True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else:", "Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def 
checkFriend(userid,User): friendName =[]", "\"\"\" % (userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def", "{} d['title']=song[0] d['album'] = song[1] d['genre'] = song[2] d['publisher'] =", "requests SET Status=\"%s\" WHERE Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (-1,userfrom,userto))", "session[\"login\"] = True session[\"signup\"] = False if request.method == 'POST':", "searchForm(prefix='form6') #print 'f' value = searchform.entry.data + '%' search_fname =", "g.database.fetchall(): data={} data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data) check_artist", "the %s field - %s\" % ( getattr(form, field).label.text, error", "if request.form['birthday_year'] != '0' and request.form['birthday_month'] != '0' and request.form['birthday_day']", "userid) for p in g.database.fetchall(): data = {} data['pname']=p[0] data['pid']=p[1]", "AddPlaylist from flask.ext.mail import Message, Mail from werkzeug import secure_filename", "data['friendpic']=a[2] friendName.append(data) g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\" %", "SET DOB=\"%s-%s-%s\" WHERE User_id=\"%s\" \"\"\" % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return", "Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s Function: %(funcName)s Time:", "allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath = UPLOAD_FOLDER +", "return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method == 'POST':", "def getComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY", "src import app import os import shutil from flask import", "else: return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid): if session['logged_in'] == False:", "recommend =[] g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" \"\"\"", "User_id='%s' \"\"\" % a[1]) data['userfromname'] = g.database.fetchone()[0] check_song = g.database.execute(\"\"\"", "SMTPHandler('127.0.0.1', '<EMAIL>', app.config['DEFAULT_MAIL_SENDER'], 'YourApplication Failed') mail_handler.setLevel(logging.ERROR) app.logger.addHandler(mail_handler) from logging import", "app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER'] }) @app.route('/user/<rcvrid>.<senderid>/comment',methods=['POST','GET']) def", "in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length = len(search_playlist) + len(search_song)", "MuShMe.user_comments WHERE Comment_id=\"%s\" \"\"\" % data) #print g.database.fetchone()[0] return redirect(url_for('userProfile',", "WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) if a or", "[] search_song= [] search_friend = [] search_playlist =[] search_artist =", "for a in g.database.fetchall(): data={} data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4]", "senderid): if request.method == 'POST': commentform = 
CommentForm(request.form, prefix='form4') #print", "and userfrom!=userto: return False else: return True @app.route('/search',methods=['POST','GET']) def search():", "from MuShMe.songs WHERE Song_title LIKE \"%s\" \"\"\" % ( value", "MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES (\"%s\",\"%s\",\"%s\",\"%s\") \"\"\" % (reportform.report.data, reportform.other.data,", "request.files['file'] if file and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))", "prefix='form4') #print senderid #print rcvrid if commentform.comment.data: query = (\"\"\"INSERT", "if enter_comment: g.conn.commit() g.database.execute(\"\"\"SELECT User_id FROM MuShMe.user_comments WHERE Comment_id=\"%s\" \"\"\"", "render_template('error.html'), 404 else: if request.method == 'GET': User=getUserData(userid) return render_template('userprofile/index.html',", "API from songs import SONG from playlist import playlist from", "FROM songs WHERE song_id=%s\", (songid)) albumname = g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic", "== \"\"\"__main__\"\"\": # To allow aptana to receive errors, set", "(request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def", "% song) for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1] g.database.execute(\"SELECT Album_pic", "= {} data['artistname']=a[0] data['artistid']=a[1] search_artist.append(data) check_friend = g.database.execute(\"\"\"SELECT Username, Name,", "User_id=\"%s\" \"\"\" % userid) for a in g.database.fetchall(): data={} data['username']=a[0]", "from flask.ext.mail import Message, Mail from werkzeug import secure_filename from", "INTO MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (addplaylistform.add.data,userid)) g.conn.commit() return", "if request.method == 'POST': loginform = LoginForm(request.form, prefix='form1') if loginform.validate_on_submit():", "WHERE Album_id=%s\", (albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid): if request.method", "= {} g.database.execute(\"\"\"SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id=\"%s\"", "in g.database.fetchall(): d={} d['userid'] = i[0] d['username'] = i[1] d['name']", "(loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone() g.database.execute(\"\"\"UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP()", "== 'POST': reportform = ReportForm(request.form, prefix='form5') print reportform.report.data check_report =", "% a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] = []", "import Message, Mail from werkzeug import secure_filename from werkzeug import", "data['reqfrom'] = a[1] data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] = [] g.database.execute(\"\"\" SELECT", "not app.debug: import logging from logging.handlers import SMTPHandler mail_handler =", "= g.database.fetchone()[0] data['song'] = [] g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher from songs", "query = (\"\"\"INSERT INTO MuShMe.comments (comment_type, Comment, User_id) VALUES (\"%s\",\"%s\",\"%s\")", "\"\"\" % song) for a in g.database.fetchall(): data['songname']=a[0] data['songid']=a[1] 
g.database.execute(\"SELECT", "VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % ('U',commentform.comment.data, senderid)) print query g.database.execute(query) g.conn.commit()", "% (userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom):", "g.database.execute(\"SELECT Comment, User_id FROM comments WHERE Comment_id=%s\", (commentid[0])) commentdata =", "user[0]) for a in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) g.database.execute(\"\"\"SELECT", "\"\"\" % (commentform.comment.data)) data = g.database.fetchone()[0] #print data enter_comment =", "ORDER BY Comment_id DESC\" % (userid)) commentids = g.database.fetchall() retval", "% (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else: flash(\"Please enter valid data", "= False if request.method == 'POST': loginform = LoginForm(request.form, prefix='form1')", "d['uname']=k[1] d['userid']=k[2] data['playlist'].append(d) recommend.append(data) return recommend def getRequest(userid): request =[]", ")) @app.route('/signup', methods=['POST']) def signup(): session[\"signup\"] = True session[\"login\"] =", "render_template('homepage/index.html', form1=loginform, form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index'))) def flash_errors(form): for field,", "!\") else: flash(\"Username or Email has been taken\") else: flash_errors(contactform)", "FROM albums WHERE Album_id=%s\", (albumname)) return g.database.fetchone()[0] @app.route('/user/<userid>/edit',methods=['POST','GET']) def editName(userid):", "% (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" and User_id2=\"%s\"", "from friends WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT", "SELECT Status from requests where Request_to=\"%s\" and Request_from=\"%s\" \"\"\" %", "% ( value )) for a in g.database.fetchall(): data =", "logging import Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module:", "\"\"\" % a[1]) data['userfromname'] = g.database.fetchone()[0] check_song = g.database.execute(\"\"\" SELECT", "User_id=%s LIMIT 5\"\"\" % userid) for song in g.database.fetchall(): data", "playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s \"\"\" %", "g.database.execute(\"\"\"SELECT Comment_id from MuShMe.comments WHERE Comment=\"%s\" \"\"\" % (commentform.comment.data)) data", "enter valid data !\") else: flash(\"Username or Email has been", "import Formatter mail_handler.setFormatter(Formatter(''' Message type: %(levelname)s Location: %(pathname)s:%(lineno)d Module: %(module)s", "User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\"", "SET Name=%s WHERE User_id=%s \"\"\", ([request.form['editname']], userid)) g.conn.commit() if request.form['birthday_year']", "flash_errors(form): for field, errors in form.errors.items(): for error in errors:", "g.database.fetchone()[0] data['song'] = [] g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher from songs where", "if session['logged_in'] == False: return render_template('error.html'), 404 else: if request.method", "def logout(): if 'email' not in session: return 
render_template('error.html') session['logged_in']=False", "import logging from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1', '<EMAIL>',", "g.database.fetchone()[0] retval.append(data) return retval def getComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments", "def addplaylist(userid): if request.method=='POST': addplaylistform = AddPlaylist(prefix='form7') g.database.execute(\"\"\"INSERT INTO MuShMe.playlists", "data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\"", "WHERE User_id2=\"%s\" \"\"\" % userid) for user in g.database.fetchall(): data", "def requestvalidate(userfrom,userto): check = g.database.execute(\"\"\" SELECT Status from requests where", "check_login: userid= g.database.fetchone() g.database.execute(\"\"\"UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id=\"%s\" \"\"\"", "and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) if a or b: return", "report(userid,commentid): if request.method == 'POST': reportform = ReportForm(request.form, prefix='form5') print", "= g.database.fetchone()[0] session['logged_in'] = True g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE", "for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(\"\"\" SELECT", "addplaylistform = AddPlaylist(prefix='form7') g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\"", "redirect(url_for('userProfile', userid=userid)) @app.route('/user/<uidto>.<uidfrom>/request',methods=['POST']) def sendrequest(uidto,uidfrom): if request.method == 'POST': if", "playlist = [] g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id from MuShMe.playlists WHERE User_id=\"%s\" \"\"\"", "playlists WHERE Playlist_id=%s and User_id=%s \"\"\" % (playlistid, userid)) g.conn.commit()", "UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER']", "\"\"\" % uid ) session[\"Name\"]=g.database.fetchone() g.database.execute(\"\"\"SELECT DOB from MuShMe.entries WHERE", "value )) for a in g.database.fetchall(): data = {} data['artistname']=a[0]", "recommend def getRequest(userid): request =[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from requests", "(\"%s\",\"%s\") \"\"\" % (userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST'])", "contactform = ContactForm(request.form, prefix='form2') if contactform.validate_on_submit(): if validate(contactform.email.data,contactform.username.data): check_signup =", "g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_pic FROM MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid)", "WHERE User_id1=\"%s\" \"\"\" % (userid)) for user in g.database.fetchall(): data", "set use_debugger=False app = create_app(config=\"\"\"config.yaml\"\"\") if app.debug: use_debugger = True", "ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] = 'src/static/' +", "from friends WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" % (userid,f['friendid'])) if", "in g.database.fetchall(): data={} data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data)", "= g.database.execute(\"\"\"SELECT Username, Name, 
Profile_pic, User_id from MuShMe.entries WHERE Username", "uid g.database.execute(\"\"\"SELECT Username from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid", "playlistid in playlist: g.database.execute(\"\"\"DELETE FROM playlists WHERE Playlist_id=%s and User_id=%s", "g.conn.close() @app.route('/login', methods=['POST']) def login(): session[\"login\"] = True session[\"signup\"] =", "return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform) def validate(email,username): email = g.database.execute(\"\"\" SELECT", "Forms import ContactForm, LoginForm, editForm, ReportForm, CommentForm, searchForm, AddPlaylist from", "request, flash, url_for, redirect from Forms import ContactForm, LoginForm, editForm,", "= [] g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\"", "secure_filename from werkzeug import SharedDataMiddleware from api import API from", "user_comments WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5\" %", "where User_id='%s' \"\"\" % a[1]) data['userfromname'] = g.database.fetchone()[0] print data['userfromname']", "(Request_from,Request_to,Status) VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % (uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile',", "%s\" % ( getattr(form, field).label.text, error )) @app.route('/signup', methods=['POST']) def", "Comment_id=%s\", (commentid[0])) commentdata = g.database.fetchone() data = {} data['comment'] =", "data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName def getPlaylist(userid):", "app.register_blueprint(admin); #for the artist pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS", "Playlist_id from recommend_playlists where Recommend_id=\"%s\" \"\"\" % a[0]) if check_playlist:", "== True: g.conn.commit() return redirect(url_for('userProfile', userid=userid)) else: return redirect(url_for('userProfile', userid=userid))", "requestvalidate(userfrom,userto): check = g.database.execute(\"\"\" SELECT Status from requests where Request_to=\"%s\"", "return True else: return False g.database.execute(\"\"\"SELECT User_id1 from friends WHERE", "\"\"\" % ( value, value )) for a in g.database.fetchall():", "or name: return False else: return True @app.route('/user/<userid>',methods=['GET']) def userProfile(userid):", "WHERE User_id=\"%s\" \"\"\" % userid) for p in g.database.fetchall(): data", "friendName def getPlaylist(userid): playlist = [] g.database.execute(\"\"\"SELECT Playlist_name,Playlist_id from MuShMe.playlists", "Song_id from recommend_songs where Recommend_id=\"%s\" \"\"\" % a[0]) if check_song:", "redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return '.' 
in filename and \\", "INSERT INTO requests (Request_from,Request_to,Status) VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % (uidfrom,uidto,1)) g.database.execute(query)", "FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC\" % (userid))", "song[3] d['songid'] = songid data['song'].append(d) check_playlist = g.database.execute(\"\"\" SELECT Playlist_id", "= \"img/ProfilePic/\" ALLOWED_EXTENSIONS = set(['png', 'jpg', 'jpeg', 'gif']) app.config['UPLOAD_FOLDER'] =", "data['status']=a[3] data['reqfromuser'] = [] g.database.execute(\"\"\" SELECT User_id,Username,Name from entries where", "\"\"\" % (userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto: return", "data['profilepic'] = a[2] data['privilege']=a[3] data['email']=a[4] data['name']=a[5] data['dob']=str(a[6]) User.append(data) return User", "FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging import Formatter", "entries where User_id=\"%s\" \"\"\" % userid) for a in g.database.fetchall():", "User_id=\"%s\" \"\"\" % uid) session['profilepic'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Name from", "Flask's debugger if external debugger is requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA'))", "\"\"\" % uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_pic FROM MuShMe.entries", "g.database.fetchall(): data={} data['reqid']=a[0] data['reqfrom'] = a[1] data['reqto']=a[2] data['status']=a[3] data['reqfromuser'] =", "(Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\",", "Request_to=\"%s\" and Request_from=\"%s\" \"\"\" % (userfrom,userto)) if check and g.database.fetchone()[0]=='-1'", "entries WHERE User_id=%s\", (data['userid'])) data['username'] = g.database.fetchone()[0] retval.append(data) return retval", "Status from requests where Request_to=\"%s\" and Request_from=\"%s\" \"\"\" % (userfrom,userto))", "songs WHERE song_id=%s\", (songid)) albumname = g.database.fetchone()[0] g.database.execute(\"SELECT Album_pic FROM", "g.conn.commit() query = (\"\"\" INSERT INTO friends Values (\"%s\",\"%s\") \"\"\"", "requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA')) except: pass app.run(use_debugger=use_debugger, use_reloader=use_debugger, threaded=True, port=8080)", "% email) name = g.database.execute(\"\"\" SELECT * from MuShMe.entries where", "(\"%s\",\"%s\",\"%s\",\"%s\") \"\"\" % (reportform.report.data, reportform.other.data, commentid, session['userid'] )) if check_report", "retval def getComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER", "% (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid))", "\"\"\" % uid) session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True g.database.execute(\"\"\"SELECT", "d['album'] = song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid']", "d['username'] = i[1] d['name'] = i[2] data['reqfromuser'].append(d) print data request.append(data)", "WHERE User_id=\"%s\" \"\"\" % p[2]) for k in g.database.fetchall(): d['username']=k[0]", "index(): session[\"login\"] = False session[\"signup\"] = False 
session[\"logged_in\"] = False", "g.conn.commit() return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check = g.database.execute(\"\"\" SELECT", "from entries where User_id=\"%s\" \"\"\" % userid) for a in", "data['userfromname'] = g.database.fetchone()[0] print data['userfromname'] check_song = g.database.execute(\"\"\" SELECT Song_id", "if email or name: return False else: return True @app.route('/user/<userid>',methods=['GET'])", "CommentForm(request.form, prefix='form4') #print senderid #print rcvrid if commentform.comment.data: query =", "methods=[\"POST\"]) def deleteplaylist(userid): playlist = request.form.getlist('playlistselect') for playlistid in playlist:", "import API from songs import SONG from playlist import playlist", "SharedDataMiddleware from api import API from songs import SONG from", "getFriend(userid): friendName =[] g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id1=\"%s\" \"\"\"", "a in g.database.fetchall(): data = {} data['username']=a[0] data['name']=a[1] data['profilepic']=a[2] data['userid']=a[3]", "commentdata[1] data['commentid'] = commentid[0] g.database.execute(\"SELECT Username FROM entries WHERE User_id=%s\",", "playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0] d['pid']=p[1] g.database.execute(\"\"\"", "=[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to=\"%s\" \"\"\" %", "MuShMe.entries where Username=\"%s\" \"\"\" % username) if email or name:", "Time: %(asctime)s Message: %(message)s ''')) if __name__ == \"\"\"__main__\"\"\": #", "return redirect(url_for('userProfile',userid=userid)) else: return redirect(url_for('userProfile', userid=userid)) def allowed_file(filename): return '.'", "Request_from=\"%s\" and Request_to=\"%s\" \"\"\" % (-1,userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile',", "{} d['pname']=p[0] d['pid']=p[1] g.database.execute(\"\"\" SELECT Username, Name,User_id from MuShMe.entries WHERE", "(addplaylistform.add.data,userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) @app.route(\"/playlist/<userid>/deleteplaylist\", methods=[\"POST\"]) def deleteplaylist(userid): playlist =", "recommend_playlists where Recommend_id=\"%s\" \"\"\" % a[0]) if check_playlist: playlistid =", "Request_from=\"%s\" \"\"\" % (userfrom,userto)) if check and g.database.fetchone()[0]=='-1' and userfrom!=userto:", "#print 'f' value = searchform.entry.data + '%' search_fname = []", "return redirect(url_for(('index'))) def flash_errors(form): for field, errors in form.errors.items(): for", "errors: flash(u\"Error in the %s field - %s\" % (", "Playlist_name LIKE \"%s\" \"\"\" % ( value )) for a", "\"\"\" % playlistid) for p in g.database.fetchall(): d= {} d['pname']=p[0]", "data['title']=a[0] data['album']=a[1] data['genre']=a[2] data['publisher']=a[3] data['songid']=a[4] data['art']=getSongArt(a[4]) search_song.append(data) check_artist = g.database.execute(\"\"\"SELECT", "session['profilepic'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method", "search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) 
@app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid): if", "session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print uid #print userid return", "form2=ContactForm(prefix='form2')) else: return redirect(url_for(('index'))) def flash_errors(form): for field, errors in", "= [] check_song = g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title", "userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid),", "newPlaylist = session['UserName'] + ' default collection' g.database.execute(\"\"\"INSERT INTO MuShMe.playlists", "= [] g.database.execute(\"\"\" SELECT Song_title,Song_Album,Genre,Publisher from songs where Song_id=\"%s\" \"\"\"", "check_playlist = g.database.execute(\"\"\" SELECT Playlist_id from recommend_playlists where Recommend_id=\"%s\" \"\"\"", "from logging import FileHandler file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from", "g.database.execute(\"\"\" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id=\"%s\" \"\"\" %", "def tos(): return render_template('tos.html') @app.route('/about') def about(): return render_template('about.html') @app.route('/changepwd')", "% (-1,userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) def requestvalidate(userfrom,userto): check", "coding: utf-8 -*- from src import app import os import", "= False session[\"signup\"] = False session[\"logged_in\"] = False return render_template('homepage/index.html',", "redirect(url_for('userProfile',userid=userid)) #All your profile are belong to us. 
@app.route('/artist/<artistid>') def", "(Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else:", "False g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\" \"\"\" % userid)", "Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute(\"\"\"SELECT User_id2", "redirect(url_for(('index'))) def flash_errors(form): for field, errors in form.errors.items(): for error", "def before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe', charset='utf8')", "methods=['GET', 'POST']) def upload_file(userid): if request.method == 'POST': file =", "songs where Song_id=\"%s\" \"\"\" % songid) for song in g.database.fetchall():", "uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] = True session['logged_in']=True #print uid #print", "{} g.database.execute(\"\"\"SELECT Username, User_id, Profile_pic from MuShMe.entries WHERE User_id=\"%s\" \"\"\"", "= [] search_song= [] search_friend = [] search_playlist =[] search_artist", "d['pname']=p[0] d['pid']=p[1] g.database.execute(\"\"\" SELECT Username, Name,User_id from MuShMe.entries WHERE User_id=\"%s\"", "= g.database.fetchone() for uid in user_id: session['userid'] = uid g.database.execute(\"\"\"SELECT", "check_song = g.database.execute(\"\"\"SELECT Song_title,Song_Album,Genre,Publisher,Song_id from MuShMe.songs WHERE Song_title LIKE \"%s\"", "Album_id=%s \" % (a[2])) g.conn.commit() data['art'] = g.database.fetchone()[0] songName.append(data) return", "!= '0' and request.form['birthday_month'] != '0' and request.form['birthday_day'] != '0':", "% userid) for a in g.database.fetchall(): data={} data['username']=a[0] data['userid']=a[1] data['profilepic']", "g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username, User_id from MuShMe.entries WHERE", "commentid, session['userid'] )) if check_report == True: g.conn.commit() return redirect(url_for('userProfile',", "taken\") else: flash_errors(contactform) return render_template('homepage/index.html', form1=LoginForm(prefix='form1'), form2=contactform) def validate(email,username): email", "LIKE \"%s\" \"\"\" % ( value, value )) for a", "return render_template('userprofile/index.html', userid=userid, form4=CommentForm(prefix='form4'), form3=editForm(prefix='form3'), form6=searchForm(prefix='form6'), form5=ReportForm(prefix='form5'),form7=AddPlaylist(prefix='form7'), friend=getFriend(userid), playlist=getPlaylist(userid), User=getUserData(userid),", "= searchForm(prefix='form6') #print 'f' value = searchform.entry.data + '%' search_fname", "teardown_request(exception): g.conn.close() @app.route('/login', methods=['POST']) def login(): session[\"login\"] = True session[\"signup\"]", "User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) b=g.database.execute(\"\"\"SELECT User_id2 from friends", "User_id=\"%s\" \"\"\" % (userid)) g.conn.commit() for uid in userid: session['userid']", "MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid))", "MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\" % userid) for song in", "file_handler = FileHandler('log.txt') file_handler.setLevel(logging.WARNING) 
app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter('''", "a[1]) for k in g.database.fetchall(): data['username']=k[0] data['uname']=k[1] search_playlist.append(data) length =", "= True session[\"signup\"] = False if request.method == 'POST': loginform", "g.database.fetchone()[0] retval.append(data) return retval def getFriend(userid): friendName =[] g.database.execute(\"\"\"SELECT User_id2", "g.database.fetchall(): data = {} g.database.execute(\"\"\"SELECT Username, User_id, Profile_pic from MuShMe.entries", "getSongArt(songid): g.database.execute(\"SELECT Song_Album FROM songs WHERE song_id=%s\", (songid)) albumname =", "userProfile(userid): if session['logged_in'] == False: return render_template('error.html'), 404 else: if", "= commentdata[0] data['userid'] = commentdata[1] data['commentid'] = commentid[0] g.database.execute(\"SELECT Username", "song[1] d['genre'] = song[2] d['publisher'] = song[3] d['songid'] = songid", "if file and allowed_file(file.filename): filename = secure_filename(file.filename) file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename)) filepath", "(userfrom,userto)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if", "songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName =[] g.database.execute(\"\"\"SELECT", "a=g.database.execute(\"\"\"SELECT User_id2 from friends WHERE User_id2=\"%s\" and User_id1=\"%s\" \"\"\" %", "from songs import SONG from playlist import playlist from admin", "g.database.execute(\"\"\" SELECT User_id,Username,Name from entries where User_id='%s' \"\"\" % a[1])", "request =[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to=\"%s\" \"\"\"", "+ UPLOAD_FOLDER @app.route('/') def index(): session[\"login\"] = False session[\"signup\"] =", "#For database connections. @app.before_request def before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306,", "request.form['birthday_year'] != '0' and request.form['birthday_month'] != '0' and request.form['birthday_day'] !=", "from playlist import playlist from admin import admin from artist", "os import shutil from flask import Flask, render_template, session, request,", "friends WHERE User_id1=\"%s\" and User_id2=\"%s\" \"\"\" % (userid,f['friendid'])) if a", "songName def getUserData(userid): User = [] g.database.execute(\"\"\" SELECT Username,User_id,Profile_pic,Privilege,Email_id,Name,DOB from", "form1=LoginForm(prefix='form1'), form2=ContactForm(prefix='form2')) #For database connections. 
@app.before_request def before_request(): g.conn =", "True else: return False g.database.execute(\"\"\"SELECT User_id1 from friends WHERE User_id2=\"%s\"", "i[0] d['username'] = i[1] d['name'] = i[2] data['reqfromuser'].append(d) print data", "g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC\"", "g.database.execute(\"\"\" SELECT Recommend_id,User_id_from,User_id_to from recommend where User_id_to=\"%s\" LIMIT 5 \"\"\"", "from logging.handlers import SMTPHandler mail_handler = SMTPHandler('127.0.0.1', '<EMAIL>', app.config['DEFAULT_MAIL_SENDER'], 'YourApplication", "getRequest(userid): request =[] g.database.execute(\"\"\" SELECT Request_id,Request_from,Request_to,Status from requests where Request_to=\"%s\"", "search_artist=search_artist,friends=search_friend, search_playlist=search_playlist,length = length) else: return render_template('searchpage/search.html',form6=searchForm(prefix='form6')) @app.route('/user/<userid>/addplaylist',methods=['POST']) def addplaylist(userid):", "errors in form.errors.items(): for error in errors: flash(u\"Error in the", "def validate(email,username): email = g.database.execute(\"\"\" SELECT * from MuShMe.entries where", "True try: # Disable Flask's debugger if external debugger is", "def getAllComments(userid): g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY", "reportform.report.data check_report = g.database.execute(\"\"\"INSERT INTO MuShMe.complaints (Complain_type, Complain_description, Comment_id,reported_by) VALUES", "@app.route('/user/<userid>/file', methods=['GET', 'POST']) def upload_file(userid): if request.method == 'POST': file", "User=getUserData(userid), Comments=getComments(userid), songs=getSong(userid), Recommends=getRecommend(userid), Requests=getRequest(userid),frnd=checkFriend(userid,User), AllComments=getAllComments(userid), AllRecommends=getAllRecommend(userid)) def checkFriend(userid,User): friendName", "= FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter(''' Message", "in g.database.fetchall(): data['friendname']=a[0] data['friendid']=a[1] data['friendpic']=a[2] friendName.append(data) print friendName return friendName", "userid=userto)) @app.route('/user/<userto>.<userfrom>/reject',methods=['POST']) def rejectrequest(userto,userfrom): if request.method == 'POST': query=(\"\"\" UPDATE", "Artist_id from MuShMe.artists WHERE Artist_name LIKE \"%s\" \"\"\" % (", "User_id=\"%s\" \"\"\" % uid ) session[\"Name\"]=g.database.fetchone() g.database.execute(\"\"\"SELECT DOB from MuShMe.entries", "False: return render_template('error.html'), 404 else: if request.method == 'GET': User=getUserData(userid)", "WHERE User_id=\"%s\" \"\"\" % uid ) session[\"dob\"]=str(g.database.fetchone()) session['logged_in'] = True", "WHERE Playlist_name LIKE \"%s\" \"\"\" % ( value )) for", "where Request_to=\"%s\" \"\"\" % userid) for a in g.database.fetchall(): data={}", "Song_title LIKE \"%s\" \"\"\" % ( value )) for a", "Username from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid ) session['UserName']=g.database.fetchone()[0]", "\"\"\" % a[0]) if check_playlist: playlistid = g.database.fetchone()[0] data['playlist'] =", "'email' not in session: return render_template('error.html') session['logged_in']=False return render_template('login.html') if", "a[1]) data['userfromname'] = g.database.fetchone()[0] print data['userfromname'] check_song = 
g.database.execute(\"\"\" SELECT", "= song[3] d['songid'] = songid d['songart'] = getSongArt(songid) data['song'].append(d) check_playlist", "admin from artist import artist import pymysql import hashlib from", "request.method == 'POST': if requestvalidate(uidfrom,uidto): query=(\"\"\" INSERT INTO requests (Request_from,Request_to,Status)", "User_id) VALUES (\"%s\",\"%s\",\"%s\") \"\"\" % ('U',commentform.comment.data, senderid)) print query g.database.execute(query)", "request.method == 'POST': query=(\"\"\" UPDATE requests SET Status=\"%s\" WHERE Request_from=\"%s\"", "User_id=\"%s\" \"\"\" % userid) for p in g.database.fetchall(): data =", "= [] for commentid in commentids: g.database.execute(\"SELECT Comment, User_id FROM", "userid print request.form if request.form['editname'] != '': g.database.execute(\"\"\"UPDATE MuShMe.entries SET", "return redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def acceptrequest(userto,userfrom): if request.method == 'POST':", "= g.database.execute(\"\"\"INSERT INTO MuShMe.user_comments (Comment_id, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (data,rcvrid))", "DESC\" % (userid)) commentids = g.database.fetchall() retval = [] for", "artist pages app.register_blueprint(artist); UPLOAD_FOLDER = \"img/ProfilePic/\" ALLOWED_EXTENSIONS = set(['png', 'jpg',", "User_id=\"%s\" \"\"\" % (request.form['birthday_year'],request.form['birthday_month'],request.form['birthday_day'], userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) else: return", "g.database.execute(\"\"\"SELECT Song_id from MuShMe.user_song WHERE User_id=%s LIMIT 5\"\"\" % userid)", "g.database.fetchone()[0] return redirect(url_for('userProfile', userid=rcvrid)) @app.route('/user/<userid>/<commentid>/report',methods=['POST','GET']) def report(userid,commentid): if request.method ==", "[] for commentid in commentids: g.database.execute(\"SELECT Comment, User_id FROM comments", "g.database.fetchone() g.database.execute(\"\"\"UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id=\"%s\" \"\"\" % (userid))", "User_id=%s \"\"\" % (playlistid, userid)) g.conn.commit() return redirect(url_for('userProfile',userid=userid)) #All your", "(Username,Email_id,Pwdhash,Name) VALUES (\"%s\",\"%s\",\"%s\",\"%s\")\"\"\" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, )) if check_signup:", "editName(userid): if request.method == 'POST': uid = userid print request.form", "from artist import artist import pymysql import hashlib from flask", "FileHandler('log.txt') file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) from logging import Formatter mail_handler.setFormatter(Formatter(''' Message type:", "Name, Profile_pic, User_id from MuShMe.entries WHERE Username LIKE \"%s\" or", "BY Comment_id DESC\" % (userid)) commentids = g.database.fetchall() retval =", "mail.init_app(app) #For the collector script. 
app.register_blueprint(API); #For the songs app.register_blueprint(SONG);", "g.database.execute(\"\"\"SELECT User_id from MuShMe.entries WHERE Email_id=\"%s\" AND Pwdhash=\"%s\" \"\"\" %", "b: return True elif userid == f['friendid']: return True else:", "#For the playlist app.register_blueprint(playlist); #for the admin pages app.register_blueprint(admin); #for", "MuShMe.artists WHERE Artist_name LIKE \"%s\" \"\"\" % ( value ))", "userid == f['friendid']: return True else: return False g.database.execute(\"\"\"SELECT User_id1", "before_request(): g.conn = pymysql.connect(host='127.0.0.1', port=3306, user='root', passwd='<PASSWORD>', db='MuShMe', charset='utf8') g.database", "if check_login: userid= g.database.fetchone() g.database.execute(\"\"\"UPDATE MuShMe.entries SET Last_Login=CURRENT_TIMESTAMP() WHERE User_id=\"%s\"", "userid=userid)) app.add_url_rule('/user/uploads/<filename>', 'uploaded_file',build_only=True) app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {'/user/uploads': 'src/static' + app.config['UPLOAD_FOLDER']", "== 'POST': if requestvalidate(uidfrom,uidto): query=(\"\"\" INSERT INTO requests (Request_from,Request_to,Status) VALUES", "session['profilepic'] = g.database.fetchone()[0] session['logged_in'] = True g.database.execute(\"\"\"SELECT Name from MuShMe.entries", "prefix='form5') print reportform.report.data check_report = g.database.execute(\"\"\"INSERT INTO MuShMe.complaints (Complain_type, Complain_description,", "where Song_id=\"%s\" \"\"\" % songid) for song in g.database.fetchall(): d", "AND Pwdhash=\"%s\" \"\"\" % (loginform.email.data, hashlib.sha1(loginform.password.data).hexdigest())) if check_login: userid= g.database.fetchone()", "into MuShMe.entries (Username,Email_id,Pwdhash,Name) VALUES (\"%s\",\"%s\",\"%s\",\"%s\")\"\"\" % (contactform.username.data, contactform.email.data, hashlib.sha1(contactform.password.data).hexdigest(),contactform.name.data, ))", "uid) session['privilege'] = g.database.fetchone()[0] g.database.execute(\"\"\"SELECT Profile_Pic FROM MuShMe.entries WHERE User_id=\"%s\"", "g.database.execute(\"SELECT Comment_id FROM user_comments WHERE User_id=%s ORDER BY Comment_id DESC", "True g.database.execute(\"\"\"SELECT Name from MuShMe.entries WHERE User_id=\"%s\" \"\"\" % uid", "WHERE Song_id=\"%s\" \"\"\" % song) for a in g.database.fetchall(): data['songname']=a[0]", "debugger is requested use_debugger = not(app.config.get('DEBUG_WITH_APTANA')) except: pass app.run(use_debugger=use_debugger, use_reloader=use_debugger,", "@app.route('/artist/<artistid>') def artistProfile(artistid): return render_template('artistpage/index.html',form6=searchForm(prefix='form6')) #To handle 404 not found", "User_id1 from friends WHERE User_id2=\"%s\" \"\"\" % userid) for user", "g.database.execute(\"\"\"INSERT INTO MuShMe.playlists (Playlist_name, User_id) VALUES (\"%s\",\"%s\")\"\"\" % (newPlaylist,uid)) g.conn.commit()", "WHERE User_id=%s ORDER BY Comment_id DESC LIMIT 5\" % (userid))", "'POST': searchform = searchForm(prefix='form6') #print 'f' value = searchform.entry.data +", "(\"%s\",\"%s\")\"\"\" % (newPlaylist,uid)) g.conn.commit() return redirect(url_for('userProfile',userid=uid)) else: flash(\"Please enter valid", "\"\"\" % (uidfrom,uidto,1)) g.database.execute(query) g.conn.commit() return redirect(url_for('userProfile', userid=uidto)) @app.route('/user/<userto>.<userfrom>/accept',methods=['POST']) def", "from MuShMe.comments WHERE Comment=\"%s\" \"\"\" % (commentform.comment.data)) data = g.database.fetchone()[0]", ") 
[The remaining fragments of the same file cover authentication and startup: signup INSERTs Username, Email_id, Pwdhash, and Name into MuShMe.entries, with Pwdhash = hashlib.sha1(password).hexdigest(); login SELECTs User_id by Email_id and the same SHA-1 hash, then fills the session (userid, username, profilepic, privilege, Name, logged_in), reads DOB, and stamps Last_Login=CURRENT_TIMESTAMP(). The entry point derives use_debugger from an app.config.get('DEBUG_WITH_APTANA') lookup inside a bare try/except before calling app.run(use_debugger=use_debugger, use_reloader=use_debugger, ...).]
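[The unsalted SHA-1 scheme above is weak against precomputed tables. A hedged standard-library alternative, a sketch only; the function names here are mine, not the app's:]

import hashlib
import hmac
import os

def hash_password(password, iterations=100_000):
    # Salted PBKDF2 instead of bare SHA-1; stores "salt$digest" in hex.
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"),
                                 salt, iterations)
    return salt.hex() + "$" + digest.hex()

def verify_password(password, stored, iterations=100_000):
    salt_hex, digest_hex = stored.split("$")
    candidate = hashlib.pbkdf2_hmac("sha256", password.encode("utf-8"),
                                    bytes.fromhex(salt_hex), iterations)
    # Constant-time comparison avoids leaking prefix matches.
    return hmac.compare_digest(candidate.hex(), digest_hex)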
[The second fragment list reassembles, nearly in full, into a single file: the DrKit lazy-slot-filling evaluation script (Python, TF1-compat). Cleaned reconstruction, with the shuffled fragments re-ordered by their overlaps:]

# coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Lint as: python3
"""Evaluate lazy slot filling results."""

import codecs
import collections
import gzip
import json
import random
import re
import string
import unicodedata

from absl import app
from absl import flags
from bert import tokenization
from language.labs.drkit import input_fns
import numpy as np
import tensorflow.compat.v1 as tf

PUNCTUATION = frozenset(string.punctuation)

FLAGS = flags.FLAGS

## Required parameters
flags.DEFINE_string("ground_truth_file", None,
                    "File with ground truth answers.")

flags.DEFINE_string("predicted_answers_file", None,
                    "File with predicted answers from model.")

flags.DEFINE_string("relation_counts_file", None,
                    "JSON file with relation counts.")


class NumpyEncoder(json.JSONEncoder):
  """Special json encoder for numpy types."""

  def default(self, obj):
    if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16,
                        np.int32, np.int64, np.uint8, np.uint16, np.uint32,
                        np.uint64)):
      return int(obj)
    elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
      return float(obj)
    elif isinstance(obj, (np.ndarray,)):  # This is the fix
      return obj.tolist()
    return json.JSONEncoder.default(self, obj)
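[A quick check of the encoder above, with invented data; json.dump is later called with cls=NumpyEncoder on dicts holding numpy values, which the stock encoder rejects. Note that np.float_ in the isinstance tuple was removed in NumPy 2.0, so the class as written targets NumPy 1.x.]

import json
import numpy as np

blob = {"scores": np.arange(3, dtype=np.float32), "best": np.int64(7)}
print(json.dumps(blob, cls=NumpyEncoder))
# -> {"scores": [0.0, 1.0, 2.0], "best": 7}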
def wikimovie_eval_fn(dataset, results, name_map, output_prediction_file,
                      **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs
  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_ques = {ex.qas_id: ex.question_text for ex in dataset.examples}
  gt_entity = {ex.qas_id: ex.subject_entity[0] for ex in dataset.examples}
  inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  chain2stats = {ch: [0., 0.] for ch in inf_chain.values()}
  incorrect_results, correct_results = [], []
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction in gt_answer[qas_id]:
      num_correct += 1
      chain2stats[inf_chain[qas_id]][0] += 1
      correct_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          correct_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    else:
      incorrect_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          incorrect_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    chain2stats[inf_chain[qas_id]][1] += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  json.dump(
      random.sample(incorrect_results, 100),
      tf.gfile.Open(output_prediction_file + ".incorrect", "w"),
      cls=NumpyEncoder)
  json.dump(
      random.sample(correct_results, 100),
      tf.gfile.Open(output_prediction_file + ".correct", "w"),
      cls=NumpyEncoder)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
  }
  for ch, stats in chain2stats.items():
    metrics["inference-chains-acc/" + ch] = stats[0] / stats[1]
  return metrics
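[Toy illustration, with fabricated question ids, of the chain2stats bookkeeping above: each inference chain accumulates a [correct, total] pair that becomes an "inference-chains-acc/<chain>" metric.]

inf_chain = {"q1": "directed_by", "q2": "directed_by", "q3": "starred_actors"}
correct = {"q1": True, "q2": False, "q3": True}
chain2stats = {ch: [0., 0.] for ch in inf_chain.values()}
for qid, ch in inf_chain.items():
    chain2stats[ch][0] += correct[qid]
    chain2stats[ch][1] += 1
print({"inference-chains-acc/" + ch: s[0] / s[1]
       for ch, s in chain2stats.items()})
# -> {'inference-chains-acc/directed_by': 0.5,
#     'inference-chains-acc/starred_actors': 1.0}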
def multihop_eval_fn(dataset, results, name_map, output_prediction_file,
                     supervision="mention", **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    supervision: Type of supervision used in the model.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs
  # Collect ground truth answers.
  gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in dataset.examples}
  if supervision == "mention":
    gt_answer = gt_mentions
  else:
    gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction == gt_answer[qas_id]:
      num_correct += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
      "micro-p": micro[0],
      "micro-r": micro[1],
      "micro-f": micro[2],
      "macro-p": macro[0],
      "macro-r": macro[1],
      "macro-f": macro[2],
  }
  return metrics
def hotpot_eval_fn(dataset, results, name_map, output_prediction_file,
                   **kwargs):
  """Compute evaluation metrics for HotpotQADataset.

  Args:
    dataset: An object of type HotpotQADataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs
  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_types = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = {2: 0., 5: 0., 10: 0., 20: 0.}
  aps = []
  no_answer = 0.
  all_predictions = {}
  bridge_acc, comp_acc = 0., 0.
  bridge_tot, comp_tot = 0, 0
  single_acc = 0.
  layer_weights = np.zeros_like(results[0]["layer_probs"])
  num_layer_entities = {i: 0. for i in range(layer_weights.shape[0])}
  num_new_entities = {i: 0. for i in range(layer_weights.shape[0])}
  for result in results:
    qas_id = result["qas_ids"].decode("utf-8")
    preds = result["top_idx"]
    scores = result["top_vals"]
    ans = gt_answer[qas_id]
    my_type = gt_types[qas_id]
    if my_type == "bridge":
      bridge_tot += 1
    else:
      comp_tot += 1
    ranks = np.where(np.in1d(preds, ans))[0]
    ranks = np.sort(ranks)
    ap = 0.
    cnt = 0.
    if any(rr < 10 for rr in ranks):
      single_acc += 1
    if ranks.shape[0] == 0:
      no_answer += 1
    for rr in ranks:
      cnt += 1
      ap += cnt / (rr + 1)
    if ans:
      aps.append(ap / len(ans))
    else:
      aps.append(0.)
    found = False
    for key in [2, 5, 10, 20]:
      if found or np.in1d(ans, preds[:key]).all():
        num_correct[key] += 1
        found = True
        if key == 10:
          if my_type == "bridge":
            bridge_acc += 1
          else:
            comp_acc += 1
    # Non-accuracy stats
    layer_weights += result["layer_probs"]
    layer_entities = {i: set() for i in range(layer_weights.shape[0])}
    all_predictions[qas_id] = {}
    for i in range(layer_weights.shape[0]):
      layer_entities[i] = set(
          [ee for ee in result["layer_%d_ent" % i] if ee != -1])
      num_layer_entities[i] += len(layer_entities[i])
      num_new_entities[i] += len(layer_entities[i] - layer_entities[0])
      # all_predictions[qas_id]["layer_%d" % i] = [
      #     name_map[str(ee)] for ee in layer_entities[i]]
    all_predictions[qas_id]["predictions"] = [
        (name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds)
    ]
  tf.logging.info("Evaluated %d items", len(all_predictions))
  accuracy = {
      key: (num_correct[key] / len(all_predictions)) for key in num_correct
  }

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))

  # Return metrics.
  metrics = {"eval/@%d" % key: accuracy[key] for key in accuracy}
  metrics["accuracy"] = accuracy[10]
  metrics["eval/map"] = sum(aps) / len(all_predictions)
  metrics["eval/bridge_accuracy"] = bridge_acc / bridge_tot
  metrics["eval/comparison_accuracy"] = comp_acc / comp_tot
  metrics["analysis/single_accuracy"] = single_acc / len(all_predictions)
  metrics["analysis/no_answers"] = no_answer / len(all_predictions)
  for i in range(layer_weights.shape[0]):
    metrics["analysis/layer_weight_%d" % i] = layer_weights[i] / len(
        all_predictions)
    metrics["analysis/num_entities_%d" % i] = num_layer_entities[i] / len(
        all_predictions)
    metrics["analysis/num_new_entities_%d" % i] = num_new_entities[i] / len(
        all_predictions)
  return metrics
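[Toy check, with fabricated ids, of the rank metrics above: preds is a ranked candidate list, ans the gold set; hits@k requires all gold answers inside the top k, and ap follows the same running cnt/(rank+1) formula.]

import numpy as np

preds = np.array([4, 9, 2, 7])
ans = np.array([9, 7])
ranks = np.sort(np.where(np.in1d(preds, ans))[0])  # -> [1, 3]
ap, cnt = 0., 0.
for rr in ranks:
    cnt += 1
    ap += cnt / (rr + 1)
print(ap / len(ans))                        # -> (1/2 + 2/4) / 2 = 0.5
print(bool(np.in1d(ans, preds[:2]).all()))  # hits@2 -> False (7 sits at rank 3)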
def normalize_answer(s):
  """Lower text and remove punctuation, articles and extra whitespace."""

  def remove_articles(text):
    return re.sub(r"\b(a|an|the)\b", " ", text)

  def white_space_fix(text):
    return " ".join(text.split())

  def remove_punc(text):
    exclude = set(string.punctuation)
    return "".join(ch for ch in text if ch not in exclude)

  def lower(text):
    return text.lower()

  return white_space_fix(remove_articles(remove_punc(lower(s))))


def f1_score(prediction, ground_truth):
  """Compute F1 score."""
  prediction_tokens = normalize_answer(prediction).split()
  ground_truth_tokens = normalize_answer(ground_truth).split()
  common = collections.Counter(prediction_tokens) & collections.Counter(
      ground_truth_tokens)
  num_same = sum(common.values())
  if num_same == 0:
    return 0
  precision = 1.0 * num_same / len(prediction_tokens)
  recall = 1.0 * num_same / len(ground_truth_tokens)
  f1 = (2 * precision * recall) / (precision + recall)
  return f1


def exact_match_score(prediction, ground_truth):
  """Compute EM score."""
  return normalize_answer(prediction) == normalize_answer(ground_truth)


def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
  scores_for_ground_truths = []
  for ground_truth in ground_truths:
    my_score = metric_fn(prediction, ground_truth)
    scores_for_ground_truths.append(my_score)
  return max(scores_for_ground_truths)


def read_predictions(prediction_file):
  with tf.gfile.Open(prediction_file) as f:
    predictions = json.load(f)
  return predictions


def read_answers(gold_file):
  """Read ground truth answers."""
  answers = {}
  f = tf.gfile.Open(gold_file)
  if gold_file.endswith(".gz"):
    f = gzip.GzipFile(fileobj=f)
  for i, line in enumerate(f):
    example = json.loads(line)
    if i == 0 and "header" in example:
      continue
    for qa in example["qas"]:
      answers[qa["qid"]] = qa["answers"]
  f.close()
  return answers


def evaluate(answers, predictions, skip_no_answer=False):
  """Compute F1 and EM scores."""
  f1 = exact_match = total = 0
  for qid, ground_truths in answers.items():
    if qid not in predictions:
      if not skip_no_answer:
        message = "Unanswered question %s will receive score 0." % qid
        print(message)
        total += 1
      continue
    total += 1
    prediction = predictions[qid]
    exact_match += metric_max_over_ground_truths(exact_match_score, prediction,
                                                 ground_truths)
    f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)

  exact_match = 100.0 * exact_match / total
  f1 = 100.0 * f1 / total

  return {"exact_match": exact_match, "f1": f1}


def mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True):
  answers = read_answers(dataset_file)
  predictions = read_predictions(predictions_file)
  return evaluate(answers, predictions, skip_no_answer)
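[Sanity check of the normalization and token-F1 above; the strings are invented. Normalization lowercases, strips punctuation, and drops articles before the bag-of-tokens overlap is computed.]

print(normalize_answer("The Color of Money!"))   # -> "color of money"
print(f1_score("the color of money", "Color of Money"))            # -> 1.0
print(exact_match_score("Color of Money", "the color of money"))   # -> True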
def compute_scores(ground_truth_file, predicted_answers_file):
  """Read predictions and ground truth and return P, R, F."""
  telemetry, incorrect = read_results(ground_truth_file,
                                      predicted_answers_file)
  micro = aprf(telemetry)
  relationwise = aprf_relationwise(telemetry)
  macro = sum([val[0] for _, val in relationwise.items()])
  macro = macro / len(relationwise)
  return micro, macro, relationwise, incorrect


def read_results(ground_truth_file, predicted_answers_file):
  """Read results and ground truth and return data structure with stats."""
  with codecs.getreader("utf-8")(tf.gfile.GFile(ground_truth_file,
                                                "r")) as read:
    data_ = {}
    for line in read:
      item = json.loads(line.strip())
      if isinstance(item["relation"], dict):
        relation = item["relation"]["wikidata_id"]
      elif isinstance(item["relation"], list):
        relation = (
            item["relation"][0]["wikidata_id"] + "_" +
            item["relation"][1]["wikidata_id"])
      data_[item["id"]] = [relation, item["subject"]["wikidata_id"]]
      if "is_impossible" in item and item["is_impossible"]:
        continue
      if item["object"] is None:
        continue
      if isinstance(item["object"]["mention"], dict):
        data_[item["id"]] += [item["object"]["mention"]["text"]]
      if "name" in item["object"]:
        data_[item["id"]] += [item["object"]["name"]]
      if "aliases" in item["object"]:
        data_[item["id"]] += item["object"]["aliases"].keys()
  with codecs.getreader("utf-8")(tf.gfile.GFile(predicted_answers_file,
                                                "r")) as fin:
    predictions = json.load(fin)

  telemetry, incorrect = [], []
  n = 0
  for key in data_:
    if key not in predictions:
      continue
    g = data_[key][2:]
    a = predictions[key]
    m = data_[key][:2]
    stats = score(g, a)
    telemetry.append([m[0], m[1], g, a, stats])
    if stats[0] == 0. and stats[3] > 0.:
      incorrect.append(key)
    n += 1
  return telemetry, incorrect


def aprf_relationwise(g):
  """Returns precision, recall and F score for each relation."""
  rel_to_stats = collections.defaultdict(list)
  for item in g:
    rel_to_stats[item[0]].append(item)
  rel_to_scores = {}
  for rel, stats in rel_to_stats.items():
    rel_to_scores[rel] = [aprf(stats), len(stats)]
  return rel_to_scores


def aprf(g):
  """Returns precision, recall and F of the given statistics."""
  tp, _, sys_pos, real_pos = sum([x[-1] for x in g])
  if tp == 0:
    p = r = f = 0.0
  else:
    p = tp / float(sys_pos) if sys_pos > 0 else 0.
    r = tp / float(real_pos) if real_pos > 0 else 0.
    f = 2 * p * r / (p + r)
  return np.asarray([p, r, f])


def score(gold, answer):
  """Compares answer to ground truth to return TP / FP stats."""
  if gold:
    gold = set([simplify(g) for g in gold])
  answer = simplify(answer)
  result = np.zeros(4)
  if gold:
    result[3] += 1
    if answer in gold:
      result[0] += 1
  else:
    if not answer:
      result[1] += 1
  if answer:
    result[2] += 1
  return result
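[Toy check of aprf() above: each telemetry row ends with the 4-vector produced by score(), i.e. [true-positive, true-negative, system-positive, gold-positive] counts. The relation/entity ids are fabricated and the stats vectors are hand-written to match that convention.]

import numpy as np

telemetry = [
    ["P57", "Q1", ["jane doe"], "jane doe", np.array([1., 0., 1., 1.])],
    ["P57", "Q2", ["john roe"], "someone", np.array([0., 0., 1., 1.])],
]
print(aprf(telemetry))  # -> [0.5 0.5 0.5]  (1 TP over 2 system / 2 gold positives)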
for i in range(layer_weights.shape[0])} num_new_entities = {i: 0.", "\".join(text.split()) def remove_punc(text): exclude = set(string.punctuation) return \"\".join(ch for ch", "(np.float_, np.float16, np.float32, np.float64)): return float(obj) elif isinstance(obj, (np.ndarray,)): #", "in answers.items(): if qid not in predictions: if not skip_no_answer:", "= True if key == 10: if my_type == \"bridge\":", "prediction, ground_truths): scores_for_ground_truths = [] for ground_truth in ground_truths: my_score", "0., 0.]), 0 rare_stats, rare_total = np.array([0., 0., 0.]), 0", "range(layer_weights.shape[0])} all_predictions[qas_id] = {} for i in range(layer_weights.shape[0]): layer_entities[i] =", "rel_to_stats = collections.defaultdict(list) for item in g: rel_to_stats[item[0]].append(item) rel_to_scores =", "law or agreed to in writing, software # distributed under", "result[\"sparse_%d\" % hop], \"dense_%d\" % hop: result[\"dense_%d\" % hop], \"mention_%d\"", "for key in accuracy} metrics[\"accuracy\"] = accuracy[10] metrics[\"eval/map\"] = sum(aps)", "= data_[key][:2] stats = score(g, a) telemetry.append([m[0], m[1], g, a,", "if sys_pos > 0 else 0. r = tp /", "char in text: if char in PUNCTUATION: continue cat =", "1 else: comp_acc += 1 # Non-accuracy stats layer_weights +=", "[2, 5, 10, 20]: if found or np.in1d(ans, preds[:key]).all(): num_correct[key]", "\"\"\"Compute evaluation metrics for OneHopDataset or TwoHopDataset. Args: dataset: An", "bridge_acc / bridge_tot metrics[\"eval/comparison_accuracy\"] = comp_acc / comp_tot metrics[\"analysis/single_accuracy\"] =", "for i, pred in enumerate(preds) ] tf.logging.info(\"Evaluated %d items\", len(all_predictions))", "else: if not answer: result[1] += 1 if answer: result[2]", "_) in relationwise.items(): if relation2counts.get(relation, 0) < thresh: rare_stats +=", "if ranks.shape[0] == 0: no_answer += 1 for rr in", "(name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds) ] tf.logging.info(\"Evaluated %d", "may obtain a copy of the License at # #", "% i] = num_layer_entities[i] / len(all_predictions) metrics[\"analysis/num_new_entities_%d\" % i] =", "+= len(layer_entities[i] - layer_entities[0]) # all_predictions[qas_id][\"layer_%d\" % i] = [", "} return metrics def hotpot_eval_fn(dataset, results, name_map, output_prediction_file, **kwargs): \"\"\"Compute", "= sum([x[-1] for x in g]) if tp == 0:", "return metrics def multihop_eval_fn(dataset, results, name_map, output_prediction_file, supervision=\"mention\", **kwargs): \"\"\"Compute", "= (2 * precision * recall) / (precision + recall)", "num_correct[key] += 1 found = True if key == 10:", "may not use this file except in compliance with the", "ranks.shape[0] == 0: no_answer += 1 for rr in ranks:", "key in data_: if key not in predictions: continue g", "no_answer += 1 for rr in ranks: cnt += 1", "set() for i in range(layer_weights.shape[0])} all_predictions[qas_id] = {} for i", "this file except in compliance with the License. 
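

# Illustrative sketch (not part of the original module): NumpyEncoder lets
# json.dumps serialize numpy scalars and arrays that would otherwise raise a
# TypeError. The dictionary below is a made-up example.
def _numpy_encoder_example():
  # np.arange(3) is converted via tolist() to [0, 1, 2];
  # np.float32(0.5) is converted via float() to 0.5.
  return json.dumps({"ids": np.arange(3), "score": np.float32(0.5)},
                    cls=NumpyEncoder)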


def wikimovie_eval_fn(dataset, results, name_map, output_prediction_file,
                      **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_ques = {ex.qas_id: ex.question_text for ex in dataset.examples}
  gt_entity = {ex.qas_id: ex.subject_entity[0] for ex in dataset.examples}
  inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  chain2stats = {ch: [0., 0.] for ch in inf_chain.values()}
  incorrect_results, correct_results = [], []
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction in gt_answer[qas_id]:
      num_correct += 1
      chain2stats[inf_chain[qas_id]][0] += 1
      correct_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          correct_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    else:
      incorrect_results.append({
          "qas_id": result["qas_ids"],
          "question": gt_ques[qas_id],
          "answers": gt_answer[qas_id],
          "subject": gt_entity[qas_id],
          "inf-chain": inf_chain[qas_id],
          "predictions": result["predictions"],
      })
      for hop in range(3):
        if "sparse_%d" % hop in result:
          incorrect_results[-1].update({
              "sparse_%d" % hop: result["sparse_%d" % hop],
              "dense_%d" % hop: result["dense_%d" % hop],
              "mention_%d" % hop: result["mention_%d" % hop],
              "entity_%d" % hop: result["entity_%d" % hop],
              "sparse_scores_%d" % hop: result["sparse_scores_%d" % hop],
              "dense_scores_%d" % hop: result["dense_scores_%d" % hop],
              "mention_scores_%d" % hop: result["mention_scores_%d" % hop],
              "entity_scores_%d" % hop: result["entity_scores_%d" % hop],
          })
    chain2stats[inf_chain[qas_id]][1] += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  json.dump(
      random.sample(incorrect_results, 100),
      tf.gfile.Open(output_prediction_file + ".incorrect", "w"),
      cls=NumpyEncoder)
  json.dump(
      random.sample(correct_results, 100),
      tf.gfile.Open(output_prediction_file + ".correct", "w"),
      cls=NumpyEncoder)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
  }
  for ch, stats in chain2stats.items():
    metrics["inference-chains-acc/" + ch] = stats[0] / stats[1]
  return metrics


def multihop_eval_fn(dataset, results, name_map, output_prediction_file,
                     supervision="mention", **kwargs):
  """Compute evaluation metrics for OneHopDataset or TwoHopDataset.

  Args:
    dataset: An object of type OneHopDataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    supervision: Type of supervision used in the model.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in dataset.examples}
  if supervision == "mention":
    gt_answer = gt_mentions
  else:
    gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = 0.
  all_predictions = {}
  for result in results:
    qas_id = result["qas_ids"]
    prediction = result["predictions"]
    if prediction == gt_answer[qas_id]:
      num_correct += 1
    all_predictions[qas_id] = name_map[str(prediction)]
  accuracy = num_correct / len(all_predictions)

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))
  micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file)

  # Return metrics.
  metrics = {
      "accuracy": accuracy,
      "micro-p": micro[0],
      "micro-r": micro[1],
      "micro-f": micro[2],
      "macro-p": macro[0],
      "macro-r": macro[1],
      "macro-f": macro[2],
  }
  return metrics


def hotpot_eval_fn(dataset, results, name_map, output_prediction_file,
                   **kwargs):
  """Compute evaluation metrics for HotpotQADataset.

  Args:
    dataset: An object of type HotpotQADataset.
    results: A list of result dicts from running estimator.predict.
    name_map: A mapping from prediction indices to text strings.
    output_prediction_file: File to store predictions to.
    **kwargs: Variable keyword arguments.

  Returns:
    metrics: A dict mapping metric names to values.
  """
  del kwargs

  # Collect ground truth answers.
  gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples}
  gt_types = {ex.qas_id: ex.inference_chain for ex in dataset.examples}

  # Compute basic metrics.
  num_correct = {2: 0., 5: 0., 10: 0., 20: 0.}
  aps = []
  no_answer = 0.
  all_predictions = {}
  bridge_acc, comp_acc = 0., 0.
  bridge_tot, comp_tot = 0, 0
  single_acc = 0.
  layer_weights = np.zeros_like(results[0]["layer_probs"])
  num_layer_entities = {i: 0. for i in range(layer_weights.shape[0])}
  num_new_entities = {i: 0. for i in range(layer_weights.shape[0])}
  for result in results:
    qas_id = result["qas_ids"].decode("utf-8")
    preds = result["top_idx"]
    scores = result["top_vals"]
    ans = gt_answer[qas_id]
    my_type = gt_types[qas_id]
    if my_type == "bridge":
      bridge_tot += 1
    else:
      comp_tot += 1
    ranks = np.where(np.in1d(preds, ans))[0]
    ranks = np.sort(ranks)
    ap = 0.
    cnt = 0.
    if any(rr < 10 for rr in ranks):
      single_acc += 1
    if ranks.shape[0] == 0:
      no_answer += 1
    for rr in ranks:
      cnt += 1
      ap += cnt / (rr + 1)
    if ans:
      aps.append(ap / len(ans))
    else:
      aps.append(0.)
    found = False
    for key in [2, 5, 10, 20]:
      if found or np.in1d(ans, preds[:key]).all():
        num_correct[key] += 1
        found = True
        if key == 10:
          if my_type == "bridge":
            bridge_acc += 1
          else:
            comp_acc += 1
    # Non-accuracy stats
    layer_weights += result["layer_probs"]
    layer_entities = {i: set() for i in range(layer_weights.shape[0])}
    all_predictions[qas_id] = {}
    for i in range(layer_weights.shape[0]):
      layer_entities[i] = set(
          [ee for ee in result["layer_%d_ent" % i] if ee != -1])
      num_layer_entities[i] += len(layer_entities[i])
      num_new_entities[i] += len(layer_entities[i] - layer_entities[0])
      # all_predictions[qas_id]["layer_%d" % i] = [
      #     name_map[str(ee)] for ee in layer_entities[i]]
    all_predictions[qas_id]["predictions"] = [
        (name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds)
    ]
  tf.logging.info("Evaluated %d items", len(all_predictions))
  accuracy = {
      key: (num_correct[key] / len(all_predictions)) for key in num_correct
  }

  # Compute advanced metrics.
  json.dump(all_predictions, tf.gfile.Open(output_prediction_file, "w"))

  # Return metrics.
  metrics = {"eval/@%d" % key: accuracy[key] for key in accuracy}
  metrics["accuracy"] = accuracy[10]
  metrics["eval/map"] = sum(aps) / len(all_predictions)
  metrics["eval/bridge_accuracy"] = bridge_acc / bridge_tot
  metrics["eval/comparison_accuracy"] = comp_acc / comp_tot
  metrics["analysis/single_accuracy"] = single_acc / len(all_predictions)
  metrics["analysis/no_answers"] = no_answer / len(all_predictions)
  for i in range(layer_weights.shape[0]):
    metrics["analysis/layer_weight_%d" % i] = (
        layer_weights[i] / len(all_predictions))
    metrics["analysis/num_entities_%d" % i] = (
        num_layer_entities[i] / len(all_predictions))
    metrics["analysis/num_new_entities_%d" % i] = (
        num_new_entities[i] / len(all_predictions))
  return metrics
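

# Illustrative sketch (not part of the original module): how the average
# precision accumulated in hotpot_eval_fn works out for one question. The
# preds and ans arrays below are made up.
def _average_precision_example():
  preds = np.array([7, 3, 9, 2])  # ranked predictions, best first
  ans = np.array([9, 2])          # gold answers sit at ranks 2 and 3 (0-based)
  ranks = np.sort(np.where(np.in1d(preds, ans))[0])  # -> [2, 3]
  ap, cnt = 0., 0.
  for rr in ranks:
    cnt += 1
    ap += cnt / (rr + 1)  # 1/3 + 2/4
  return ap / len(ans)  # ~0.417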


def normalize_answer(s):
  """Lower text and remove punctuation, articles and extra whitespace."""

  def remove_articles(text):
    return re.sub(r"\b(a|an|the)\b", " ", text)

  def white_space_fix(text):
    return " ".join(text.split())

  def remove_punc(text):
    exclude = set(string.punctuation)
    return "".join(ch for ch in text if ch not in exclude)

  def lower(text):
    return text.lower()

  return white_space_fix(remove_articles(remove_punc(lower(s))))


def f1_score(prediction, ground_truth):
  """Compute F1 score."""
  prediction_tokens = normalize_answer(prediction).split()
  ground_truth_tokens = normalize_answer(ground_truth).split()
  common = collections.Counter(prediction_tokens) & collections.Counter(
      ground_truth_tokens)
  num_same = sum(common.values())
  if num_same == 0:
    return 0
  precision = 1.0 * num_same / len(prediction_tokens)
  recall = 1.0 * num_same / len(ground_truth_tokens)
  f1 = (2 * precision * recall) / (precision + recall)
  return f1


def exact_match_score(prediction, ground_truth):
  """Compute EM score."""
  return normalize_answer(prediction) == normalize_answer(ground_truth)


def metric_max_over_ground_truths(metric_fn, prediction, ground_truths):
  scores_for_ground_truths = []
  for ground_truth in ground_truths:
    my_score = metric_fn(prediction, ground_truth)
    scores_for_ground_truths.append(my_score)
  return max(scores_for_ground_truths)


def read_predictions(prediction_file):
  with tf.gfile.Open(prediction_file) as f:
    predictions = json.load(f)
  return predictions


def read_answers(gold_file):
  """Read ground truth answers."""
  answers = {}
  f = tf.gfile.Open(gold_file)
  if gold_file.endswith(".gz"):
    f = gzip.GzipFile(fileobj=f)
  for i, line in enumerate(f):
    example = json.loads(line)
    if i == 0 and "header" in example:
      continue
    for qa in example["qas"]:
      answers[qa["qid"]] = qa["answers"]
  f.close()
  return answers


def evaluate(answers, predictions, skip_no_answer=False):
  """Compute F1 and EM scores."""
  f1 = exact_match = total = 0
  for qid, ground_truths in answers.items():
    if qid not in predictions:
      if not skip_no_answer:
        message = "Unanswered question %s will receive score 0." % qid
        print(message)
        total += 1
      continue
    total += 1
    prediction = predictions[qid]
    exact_match += metric_max_over_ground_truths(exact_match_score, prediction,
                                                 ground_truths)
    f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths)

  exact_match = 100.0 * exact_match / total
  f1 = 100.0 * f1 / total

  return {"exact_match": exact_match, "f1": f1}


def mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True):
  answers = read_answers(dataset_file)
  predictions = read_predictions(predictions_file)
  return evaluate(answers, predictions, skip_no_answer)
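

# Illustrative sketch (not part of the original module): token-level F1 after
# normalization. The strings are made up.
def _f1_example():
  # normalize_answer drops the article "the", leaving tokens
  # {quick, brown, fox} vs {quick, fox}:
  # precision = 2/3, recall = 2/2, F1 = 0.8.
  return f1_score("The quick brown fox", "quick fox")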


def compute_scores(ground_truth_file, predicted_answers_file):
  """Read predictions and ground truth and return P, R, F."""
  telemetry, incorrect = read_results(ground_truth_file,
                                      predicted_answers_file)
  micro = aprf(telemetry)
  relationwise = aprf_relationwise(telemetry)
  macro = sum([val[0] for _, val in relationwise.items()])
  macro = macro / len(relationwise)
  return micro, macro, relationwise, incorrect


def read_results(ground_truth_file, predicted_answers_file):
  """Read results and ground truth and return data structure with stats."""
  with codecs.getreader("utf-8")(tf.gfile.GFile(ground_truth_file,
                                                "r")) as read:
    data_ = {}
    for line in read:
      item = json.loads(line.strip())
      if isinstance(item["relation"], dict):
        relation = item["relation"]["wikidata_id"]
      elif isinstance(item["relation"], list):
        relation = (
            item["relation"][0]["wikidata_id"] + "_" +
            item["relation"][1]["wikidata_id"])
      data_[item["id"]] = [relation, item["subject"]["wikidata_id"]]
      if "is_impossible" in item and item["is_impossible"]:
        continue
      if item["object"] is None:
        continue
      if isinstance(item["object"]["mention"], dict):
        data_[item["id"]] += [item["object"]["mention"]["text"]]
      if "name" in item["object"]:
        data_[item["id"]] += [item["object"]["name"]]
      if "aliases" in item["object"]:
        data_[item["id"]] += item["object"]["aliases"].keys()

  with codecs.getreader("utf-8")(tf.gfile.GFile(predicted_answers_file,
                                                "r")) as fin:
    predictions = json.load(fin)

  telemetry, incorrect = [], []
  n = 0
  for key in data_:
    if key not in predictions:
      continue
    g = data_[key][2:]
    a = predictions[key]
    m = data_[key][:2]
    stats = score(g, a)
    telemetry.append([m[0], m[1], g, a, stats])
    if stats[0] == 0. and stats[3] > 0.:
      incorrect.append(key)
    n += 1
  return telemetry, incorrect


def aprf_relationwise(g):
  """Returns precision, recall and F score for each relation."""
  rel_to_stats = collections.defaultdict(list)
  for item in g:
    rel_to_stats[item[0]].append(item)
  rel_to_scores = {}
  for rel, stats in rel_to_stats.items():
    rel_to_scores[rel] = [aprf(stats), len(stats)]
  return rel_to_scores


def aprf(g):
  """Returns precision, recall and F of the given statistics."""
  tp, _, sys_pos, real_pos = sum([x[-1] for x in g])
  if tp == 0:
    p = r = f = 0.0
  else:
    p = tp / float(sys_pos) if sys_pos > 0 else 0.
    r = tp / float(real_pos) if real_pos > 0 else 0.
    f = 2 * p * r / (p + r)
  return np.asarray([p, r, f])


def score(gold, answer):
  """Compares answer to ground truth to return TP / FP stats."""
  if gold:
    gold = set([simplify(g) for g in gold])
  answer = simplify(answer)
  result = np.zeros(4)
  if gold:
    result[3] += 1
    if answer in gold:
      result[0] += 1
  else:
    if not answer:
      result[1] += 1
  if answer:
    result[2] += 1
  return result
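

# Illustrative sketch (not part of the original module): the 4-element stats
# vector built by score() reads roughly as [true-positive, correct-rejection,
# system-positive, gold-positive]; aprf() sums these vectors over telemetry
# rows. The relation/entity ids below are made up.
def _score_and_aprf_example():
  stats = score(["Paris"], "Paris")  # -> array([1., 0., 1., 1.])
  # One telemetry row of the shape read_results builds:
  row = ["P19", "Q42", ["Paris"], "Paris", stats]
  return aprf([row])  # perfect match -> array([1., 1., 1.])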
# # Licensed under the", "copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #", "in dataset.examples} if supervision == \"mention\": gt_answer = gt_mentions else:", "collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same = sum(common.values()) if num_same ==", "1.0 * num_same / len(prediction_tokens) recall = 1.0 * num_same", "qid print(message) total += 1 continue total += 1 prediction", "and extra whitespace.\"\"\" def remove_articles(text): return re.sub(r\"\\b(a|an|the)\\b\", \" \", text)", "\"\"\"Returns precision, recall and F of the given statistics.\"\"\" tp,", "5, 10, 20]: if found or np.in1d(ans, preds[:key]).all(): num_correct[key] +=", "= gt_mentions else: gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in", "_ = compute_scores(FLAGS.ground_truth_file, FLAGS.predicted_answers_file) print(\"Micro\", micro) print(\"Macro\", macro) if FLAGS.relation_counts_file", "= tp / float(sys_pos) if sys_pos > 0 else 0.", "read_predictions(predictions_file) return evaluate(answers, predictions, skip_no_answer) def compute_scores(ground_truth_file, predicted_answers_file): \"\"\"Read predictions", "answers.\"\"\" answers = {} f = tf.gfile.Open(gold_file) if gold_file.endswith(\".gz\"): f", "in predictions: continue g = data_[key][2:] a = predictions[key] m", "micro = aprf(telemetry) relationwise = aprf_relationwise(telemetry) macro = sum([val[0] for", "ground truth and return P, R, F.\"\"\" telemetry, incorrect =", "a) telemetry.append([m[0], m[1], g, a, stats]) if stats[0] == 0.", "Apache License, Version 2.0 (the \"License\"); # you may not", "either express or implied. # See the License for the", "correct_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\": gt_answer[qas_id], \"subject\": gt_entity[qas_id], \"inf-chain\":", "result in results: qas_id = result[\"qas_ids\"].decode(\"utf-8\") preds = result[\"top_idx\"] scores", "Micro-R %.3f Micro-F %.3f\" % (rare_stats[0], rare_stats[1], rare_stats[2]), \"freq\", freq_total,", "\"\"\"Read ground truth answers.\"\"\" answers = {} f = tf.gfile.Open(gold_file)", "Language Team Authors. # # Licensed under the Apache License,", "in dataset.examples} gt_types = {ex.qas_id: ex.inference_chain for ex in dataset.examples}", "items\", len(all_predictions)) accuracy = { key: (num_correct[key] / len(all_predictions)) for", "and EM scores.\"\"\" f1 = exact_match = total = 0", "OneHopDataset or TwoHopDataset. 
Args: dataset: An object of type OneHopDataset.", "unicodedata from absl import app from absl import flags from", "f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths) exact_match = 100.0 * exact_match", "result[\"layer_probs\"] layer_entities = {i: set() for i in range(layer_weights.shape[0])} all_predictions[qas_id]", "rel_to_scores[rel] = [aprf(stats), len(stats)] return rel_to_scores def aprf(g): \"\"\"Returns precision,", "(stats, _) in relationwise.items(): if relation2counts.get(relation, 0) < thresh: rare_stats", "% hop], \"entity_scores_%d\" % hop: result[\"entity_scores_%d\" % hop], }) chain2stats[inf_chain[qas_id]][1]", "= flags.FLAGS ## Required parameters flags.DEFINE_string(\"ground_truth_file\", None, \"File with ground", "toks = [] articles = {\"the\", \"a\", \"an\", \"and\", \"\"}", "np.array([0., 0., 0.]), 0 for relation, (stats, _) in relationwise.items():", "== \"hotpot\": test_hotpot_eval() else: micro, macro, rwise, _ = compute_scores(FLAGS.ground_truth_file,", "macro) if FLAGS.relation_counts_file is not None: r2c = json.load(tf.gfile.Open(FLAGS.relation_counts_file)) rare_relation_scores(rwise,", "ch in text if ch not in exclude) def lower(text):", "in [5, 100, 500, 1000]: freq_stats, freq_total = np.array([0., 0.,", "if ee != -1]) num_layer_entities[i] += len(layer_entities[i]) num_new_entities[i] += len(layer_entities[i]", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64)):", "+= stats rare_total += 1 else: freq_stats += stats freq_total", "exact_match / total f1 = 100.0 * f1 / total", "{} for result in results: qas_id = result[\"qas_ids\"] prediction =", "% hop: result[\"sparse_scores_%d\" % hop], \"dense_scores_%d\" % hop: result[\"dense_scores_%d\" %", "ground_truths): scores_for_ground_truths = [] for ground_truth in ground_truths: my_score =", "if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8,", "\"macro-r\": macro[1], \"macro-f\": macro[2], } return metrics def hotpot_eval_fn(dataset, results,", "predictions = json.load(f) return predictions def read_answers(gold_file): \"\"\"Read ground truth", "x in g]) if tp == 0: p = r", "{} bridge_acc, comp_acc = 0., 0. bridge_tot, comp_tot = 0,", "def remove_punc(text): exclude = set(string.punctuation) return \"\".join(ch for ch in", "string.\"\"\" toks = [] articles = {\"the\", \"a\", \"an\", \"and\",", "## Required parameters flags.DEFINE_string(\"ground_truth_file\", None, \"File with ground truth answers.\")", "EM scores.\"\"\" f1 = exact_match = total = 0 for", "json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) # Return metrics. 
metrics = {\"eval/@%d\" %", "eval_type == \"hotpot\": test_hotpot_eval() else: micro, macro, rwise, _ =", "stats.\"\"\" with codecs.getreader(\"utf-8\")(tf.gfile.GFile(ground_truth_file, \"r\")) as read: data_ = {} for", "= json.loads(line.strip()) if isinstance(item[\"relation\"], dict): relation = item[\"relation\"][\"wikidata_id\"] elif isinstance(item[\"relation\"],", "incorrect = [], [] n = 0 for key in", "result[\"top_vals\"] ans = gt_answer[qas_id] my_type = gt_types[qas_id] if my_type ==", "tf.logging.info(\"Evaluated %d items\", len(all_predictions)) accuracy = { key: (num_correct[key] /", "continue cat = unicodedata.category(char) if cat == \"Mn\": continue output.append(char)", "def read_results(ground_truth_file, predicted_answers_file): \"\"\"Read results and ground truth and return", "the given statistics.\"\"\" tp, _, sys_pos, real_pos = sum([x[-1] for", "freq_total = np.array([0., 0., 0.]), 0 rare_stats, rare_total = np.array([0.,", "= aprf_relationwise(telemetry) macro = sum([val[0] for _, val in relationwise.items()])", "def rare_relation_scores(relationwise, relation2counts): \"\"\"Print statistics of rare relations for different", "with predicted answers from model.\") flags.DEFINE_string(\"relation_counts_file\", None, \"JSON file with", "output_prediction_file: File to store predictions to. supervision: Type of supervision", "+ ch] = stats[0] / stats[1] return metrics def multihop_eval_fn(dataset,", "found = True if key == 10: if my_type ==", "return rel_to_scores def aprf(g): \"\"\"Returns precision, recall and F of", "bert import tokenization from language.labs.drkit import input_fns import numpy as", "counts.\") class NumpyEncoder(json.JSONEncoder): \"\"\"Special json encoder for numpy types.\"\"\" def", "len(all_predictions) # Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) micro, macro,", "+= metric_max_over_ground_truths(exact_match_score, prediction, ground_truths) f1 += metric_max_over_ground_truths(f1_score, prediction, ground_truths) exact_match", "my_type == \"bridge\": bridge_acc += 1 else: comp_acc += 1", "results: A list of result dicts from running estimator.predict. name_map:", "Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) # Return metrics. metrics", "flags.DEFINE_string(\"ground_truth_file\", None, \"File with ground truth answers.\") flags.DEFINE_string(\"predicted_answers_file\", None, \"File", "text) def white_space_fix(text): return \" \".join(text.split()) def remove_punc(text): exclude =", "0.} aps = [] no_answer = 0. all_predictions = {}", "R, F.\"\"\" telemetry, incorrect = read_results(ground_truth_file, predicted_answers_file) micro = aprf(telemetry)", "compute_scores(ground_truth_file, predicted_answers_file): \"\"\"Read predictions and ground truth and return P,", "hop], }) chain2stats[inf_chain[qas_id]][1] += 1 all_predictions[qas_id] = name_map[str(prediction)] accuracy =", "results: qas_id = result[\"qas_ids\"] prediction = result[\"predictions\"] if prediction ==", "strip_accents_and_punct(text): \"\"\"Strips accents from a piece of text.\"\"\" text =", "File to store predictions to. **kwargs: Variable keyword arguments. 
Returns:", "accuracy = { key: (num_correct[key] / len(all_predictions)) for key in", "freq_stats[1], freq_stats[2])) def main(_): eval_type = \"hotpot\" if eval_type ==", "len(all_predictions) metrics[\"analysis/num_new_entities_%d\" % i] = num_new_entities[i] / len(all_predictions) return metrics", "continue g = data_[key][2:] a = predictions[key] m = data_[key][:2]", "score(g, a) telemetry.append([m[0], m[1], g, a, stats]) if stats[0] ==", "use this file except in compliance with the License. #", "limitations under the License. # Lint as: python3 \"\"\"Evaluate lazy", "in item and item[\"is_impossible\"]: continue if item[\"object\"] is None: continue", "!= -1]) num_layer_entities[i] += len(layer_entities[i]) num_new_entities[i] += len(layer_entities[i] - layer_entities[0])", "codecs.getreader(\"utf-8\")(tf.gfile.GFile(predicted_answers_file, \"r\")) as fin: predictions = json.load(fin) telemetry, incorrect =", "= num_correct / len(all_predictions) json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) json.dump( random.sample(incorrect_results, 100),", "print( \"Threshold =\", thresh, \"rare\", rare_total, \"Micro-P %.3f Micro-R %.3f", "ground truth answers.\") flags.DEFINE_string(\"predicted_answers_file\", None, \"File with predicted answers from", "print(\"Macro\", macro) if FLAGS.relation_counts_file is not None: r2c = json.load(tf.gfile.Open(FLAGS.relation_counts_file))", "if not answer: result[1] += 1 if answer: result[2] +=", "r) return np.asarray([p, r, f]) def score(gold, answer): \"\"\"Compares answer", "ground_truth in ground_truths: my_score = metric_fn(prediction, ground_truth) scores_for_ground_truths.append(my_score) return max(scores_for_ground_truths)", "# coding=utf-8 # Copyright 2018 The Google AI Language Team", "= score(g, a) telemetry.append([m[0], m[1], g, a, stats]) if stats[0]", "in dataset.examples} # Compute basic metrics. num_correct = {2: 0.,", "answer): \"\"\"Compares answer to ground truth to return TP /", "0 else 0. f = 2 * p * r", "\".correct\", \"w\"), cls=NumpyEncoder) # Return metrics. metrics = { \"accuracy\":", "= 0., 0. bridge_tot, comp_tot = 0, 0 single_acc =", "strip_accents_and_punct(t) if tok not in articles: toks.append(tok) return \"\".join(toks) def", "OneHopDataset. results: A list of result dicts from running estimator.predict.", "def aprf_relationwise(g): \"\"\"Returns precision, recall and F score for each", "and ground truth and return data structure with stats.\"\"\" with", "incorrect def aprf_relationwise(g): \"\"\"Returns precision, recall and F score for", "item in g: rel_to_stats[item[0]].append(item) rel_to_scores = {} for rel, stats", "if tok not in articles: toks.append(tok) return \"\".join(toks) def rare_relation_scores(relationwise,", "hop], \"entity_%d\" % hop: result[\"entity_%d\" % hop], \"sparse_scores_%d\" % hop:", "0. 
for i in range(layer_weights.shape[0])} for result in results: qas_id", "def normalize_answer(s): \"\"\"Lower text and remove punctuation, articles and extra", "bridge_acc += 1 else: comp_acc += 1 # Non-accuracy stats", "rel_to_scores def aprf(g): \"\"\"Returns precision, recall and F of the", "re import string import unicodedata from absl import app from", "import unicodedata from absl import app from absl import flags", "item[\"object\"][\"aliases\"].keys() with codecs.getreader(\"utf-8\")(tf.gfile.GFile(predicted_answers_file, \"r\")) as fin: predictions = json.load(fin) telemetry,", "and return P, R, F.\"\"\" telemetry, incorrect = read_results(ground_truth_file, predicted_answers_file)", "return {\"exact_match\": exact_match, \"f1\": f1} def mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True): answers", "dataset.examples} # Compute basic metrics. num_correct = {2: 0., 5:", "% hop], \"sparse_scores_%d\" % hop: result[\"sparse_scores_%d\" % hop], \"dense_scores_%d\" %", "in results: qas_id = result[\"qas_ids\"].decode(\"utf-8\") preds = result[\"top_idx\"] scores =", "data_[item[\"id\"]] += item[\"object\"][\"aliases\"].keys() with codecs.getreader(\"utf-8\")(tf.gfile.GFile(predicted_answers_file, \"r\")) as fin: predictions =", "else: comp_tot += 1 ranks = np.where(np.in1d(preds, ans))[0] ranks =", "[relation, item[\"subject\"][\"wikidata_id\"]] if \"is_impossible\" in item and item[\"is_impossible\"]: continue if", "text) output = [] for char in text: if char", "True if key == 10: if my_type == \"bridge\": bridge_acc", "in compliance with the License. # You may obtain a", "\"entity_scores_%d\" % hop: result[\"entity_scores_%d\" % hop], }) else: incorrect_results.append({ \"qas_id\":", "= result[\"top_vals\"] ans = gt_answer[qas_id] my_type = gt_types[qas_id] if my_type", "0. r = tp / float(real_pos) if real_pos > 0", "[], [] for result in results: qas_id = result[\"qas_ids\"] prediction", "total = 0 for qid, ground_truths in answers.items(): if qid", "software # distributed under the License is distributed on an", "\"\"\"Pre-process answer string.\"\"\" toks = [] articles = {\"the\", \"a\",", "\"File with ground truth answers.\") flags.DEFINE_string(\"predicted_answers_file\", None, \"File with predicted", "num_same / len(ground_truth_tokens) f1 = (2 * precision * recall)", "\"dense_scores_%d\" % hop: result[\"dense_scores_%d\" % hop], \"mention_scores_%d\" % hop: result[\"mention_scores_%d\"", "in relationwise.items()]) macro = macro / len(relationwise) return micro, macro,", "%.3f\" % (freq_stats[0], freq_stats[1], freq_stats[2])) def main(_): eval_type = \"hotpot\"", "inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples} # Compute", "# name_map[str(ee)] for ee in layer_entities[i]] all_predictions[qas_id][\"predictions\"] = [ (name_map[str(pred)],", "# Compute basic metrics. num_correct = {2: 0., 5: 0.,", "answers.items(): if qid not in predictions: if not skip_no_answer: message", "json.load(f) return predictions def read_answers(gold_file): \"\"\"Read ground truth answers.\"\"\" answers", "ans: aps.append(ap / len(ans)) else: aps.append(0.) 
found = False for", "is None: continue if isinstance(item[\"object\"][\"mention\"], dict): data_[item[\"id\"]] += [item[\"object\"][\"mention\"][\"text\"]] if", "preds[:key]).all(): num_correct[key] += 1 found = True if key ==", "10, 20]: if found or np.in1d(ans, preds[:key]).all(): num_correct[key] += 1", "= {ex.qas_id: ex.answer_entity for ex in dataset.examples} gt_ques = {ex.qas_id:", "= [], [] n = 0 for key in data_:", "gt_types[qas_id] if my_type == \"bridge\": bridge_tot += 1 else: comp_tot", "ex.answer_entity[0] for ex in dataset.examples} # Compute basic metrics. num_correct", "encoder for numpy types.\"\"\" def default(self, obj): if isinstance(obj, (np.int_,", "normalize_answer(s): \"\"\"Lower text and remove punctuation, articles and extra whitespace.\"\"\"", "== gt_answer[qas_id]: num_correct += 1 all_predictions[qas_id] = name_map[str(prediction)] accuracy =", "num_same = sum(common.values()) if num_same == 0: return 0 precision", "result[\"predictions\"] if prediction == gt_answer[qas_id]: num_correct += 1 all_predictions[qas_id] =", "= normalize_answer(ground_truth).split() common = collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same =", "= 0. all_predictions = {} bridge_acc, comp_acc = 0., 0.", "message = \"Unanswered question %s will receive score 0.\" %", "ex.answer_mention[0] for ex in dataset.examples} if supervision == \"mention\": gt_answer", "if \"aliases\" in item[\"object\"]: data_[item[\"id\"]] += item[\"object\"][\"aliases\"].keys() with codecs.getreader(\"utf-8\")(tf.gfile.GFile(predicted_answers_file, \"r\"))", "[] n = 0 for key in data_: if key", "results: qas_id = result[\"qas_ids\"] prediction = result[\"predictions\"] if prediction in", "hop: result[\"entity_scores_%d\" % hop], }) else: incorrect_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\":", "+= 1 return telemetry, incorrect def aprf_relationwise(g): \"\"\"Returns precision, recall", "given statistics.\"\"\" tp, _, sys_pos, real_pos = sum([x[-1] for x", "for HotpotQADataset. Args: dataset: An object of type HotpotQADataset. results:", "type OneHopDataset. results: A list of result dicts from running", "5: 0., 10: 0., 20: 0.} aps = [] no_answer", "def mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True): answers = read_answers(dataset_file) predictions = read_predictions(predictions_file)", "data_ = {} for line in read: item = json.loads(line.strip())", "int(obj) elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)): return float(obj) elif", "name_map: A mapping from prediction indices to text strings. output_prediction_file:", "with the License. # You may obtain a copy of", "# Compute advanced metrics. 
json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) micro, macro, _,", "inf_chain.values()} incorrect_results, correct_results = [], [] for result in results:", "f = 2 * p * r / (p +", "\"w\")) micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file) # Return", "+= 1 rare_stats /= rare_total freq_stats /= freq_total print( \"Threshold", "for i, line in enumerate(f): example = json.loads(line) if i", "<gh_stars>1000+ # coding=utf-8 # Copyright 2018 The Google AI Language", "\"answers\": gt_answer[qas_id], \"subject\": gt_entity[qas_id], \"inf-chain\": inf_chain[qas_id], \"predictions\": result[\"predictions\"], }) for", "pred in enumerate(preds) ] tf.logging.info(\"Evaluated %d items\", len(all_predictions)) accuracy =", "from a piece of text.\"\"\" text = unicodedata.normalize(\"NFD\", text) output", "ground_truth): \"\"\"Compute EM score.\"\"\" return normalize_answer(prediction) == normalize_answer(ground_truth) def metric_max_over_ground_truths(metric_fn,", "% hop: result[\"mention_%d\" % hop], \"entity_%d\" % hop: result[\"entity_%d\" %", "Return metrics. metrics = { \"accuracy\": accuracy, \"micro-p\": micro[0], \"micro-r\":", "mrqa_eval_fn(dataset_file, predictions_file, skip_no_answer=True): answers = read_answers(dataset_file) predictions = read_predictions(predictions_file) return", "stats rare_total += 1 else: freq_stats += stats freq_total +=", "# all_predictions[qas_id][\"layer_%d\" % i] = [ # name_map[str(ee)] for ee", "express or implied. # See the License for the specific", "def compute_scores(ground_truth_file, predicted_answers_file): \"\"\"Read predictions and ground truth and return", "except in compliance with the License. # You may obtain", "ee in layer_entities[i]] all_predictions[qas_id][\"predictions\"] = [ (name_map[str(pred)], str(scores[i])) for i,", "a = predictions[key] m = data_[key][:2] stats = score(g, a)", "+= stats freq_total += 1 rare_stats /= rare_total freq_stats /=", "dataset.examples} inf_chain = {ex.qas_id: ex.inference_chain for ex in dataset.examples} #", "0.\" % qid print(message) total += 1 continue total +=", "qas_id = result[\"qas_ids\"].decode(\"utf-8\") preds = result[\"top_idx\"] scores = result[\"top_vals\"] ans", "json.JSONEncoder.default(self, obj) def wikimovie_eval_fn(dataset, results, name_map, output_prediction_file, **kwargs): \"\"\"Compute evaluation", "/ len(all_predictions) json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) json.dump( random.sample(incorrect_results, 100), tf.gfile.Open(output_prediction_file +", "20]: if found or np.in1d(ans, preds[:key]).all(): num_correct[key] += 1 found", "item and item[\"is_impossible\"]: continue if item[\"object\"] is None: continue if", "accuracy[10] metrics[\"eval/map\"] = sum(aps) / len(all_predictions) metrics[\"eval/bridge_accuracy\"] = bridge_acc /", "answers def evaluate(answers, predictions, skip_no_answer=False): \"\"\"Compute F1 and EM scores.\"\"\"", "(freq_stats[0], freq_stats[1], freq_stats[2])) def main(_): eval_type = \"hotpot\" if eval_type", "# Licensed under the Apache License, Version 2.0 (the \"License\");", "hop: result[\"mention_%d\" % hop], \"entity_%d\" % hop: result[\"entity_%d\" % hop],", "np.float16, np.float32, np.float64)): return float(obj) elif isinstance(obj, (np.ndarray,)): # This", "or TwoHopDataset. Args: dataset: An object of type OneHopDataset. 
results:", "hop: result[\"mention_scores_%d\" % hop], \"entity_scores_%d\" % hop: result[\"entity_scores_%d\" % hop],", "+= len(layer_entities[i]) num_new_entities[i] += len(layer_entities[i] - layer_entities[0]) # all_predictions[qas_id][\"layer_%d\" %", "the model. **kwargs: Variable keyword arguments. Returns: metrics: A dict", "Collect ground truth answers. gt_answer = {ex.qas_id: ex.answer_entity for ex", "CONDITIONS OF ANY KIND, either express or implied. # See", "% key: accuracy[key] for key in accuracy} metrics[\"accuracy\"] = accuracy[10]", "= [] no_answer = 0. all_predictions = {} bridge_acc, comp_acc", "f = 0.0 else: p = tp / float(sys_pos) if", "as tf PUNCTUATION = frozenset(string.punctuation) FLAGS = flags.FLAGS ## Required", "i in range(layer_weights.shape[0])} all_predictions[qas_id] = {} for i in range(layer_weights.shape[0]):", "json encoder for numpy types.\"\"\" def default(self, obj): if isinstance(obj,", "% (rare_stats[0], rare_stats[1], rare_stats[2]), \"freq\", freq_total, \"Micro-P %.3f Micro-R %.3f", "gold = set([simplify(g) for g in gold]) answer = simplify(answer)", "remove_punc(text): exclude = set(string.punctuation) return \"\".join(ch for ch in text", "output = [] for char in text: if char in", "+= 1 else: comp_tot += 1 ranks = np.where(np.in1d(preds, ans))[0]", "num_layer_entities = {i: 0. for i in range(layer_weights.shape[0])} num_new_entities =", "advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) # Return metrics. metrics =", "in answer.strip().lower().split(): tok = strip_accents_and_punct(t) if tok not in articles:", "read_results(ground_truth_file, predicted_answers_file): \"\"\"Read results and ground truth and return data", "rare_relation_scores(relationwise, relation2counts): \"\"\"Print statistics of rare relations for different thresholds.\"\"\"", "+= 1 return result def strip_accents_and_punct(text): \"\"\"Strips accents from a", "in num_correct } # Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\"))", "/ len(all_predictions) metrics[\"analysis/num_entities_%d\" % i] = num_layer_entities[i] / len(all_predictions) metrics[\"analysis/num_new_entities_%d\"", "= {} for rel, stats in rel_to_stats.items(): rel_to_scores[rel] = [aprf(stats),", "0. all_predictions = {} bridge_acc, comp_acc = 0., 0. bridge_tot,", "{ex.qas_id: ex.subject_entity[0] for ex in dataset.examples} inf_chain = {ex.qas_id: ex.inference_chain", "ground_truths in answers.items(): if qid not in predictions: if not", "answers. gt_answer = {ex.qas_id: ex.answer_entity for ex in dataset.examples} gt_ques", "def strip_accents_and_punct(text): \"\"\"Strips accents from a piece of text.\"\"\" text", "name_map, output_prediction_file, **kwargs): \"\"\"Compute evaluation metrics for OneHopDataset or TwoHopDataset.", "\"micro-r\": micro[1], \"micro-f\": micro[2], \"macro-p\": macro[0], \"macro-r\": macro[1], \"macro-f\": macro[2],", "len(all_predictions)) accuracy = { key: (num_correct[key] / len(all_predictions)) for key", "qas_id = result[\"qas_ids\"] prediction = result[\"predictions\"] if prediction in gt_answer[qas_id]:", "def default(self, obj): if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16,", "answer string.\"\"\" toks = [] articles = {\"the\", \"a\", \"an\",", "gold: result[3] += 1 if answer in gold: result[0] +=", "0. layer_weights = np.zeros_like(results[0][\"layer_probs\"]) num_layer_entities = {i: 0. 
for i", "FP stats.\"\"\" if gold: gold = set([simplify(g) for g in", "f1_score(prediction, ground_truth): \"\"\"Compute F1 score.\"\"\" prediction_tokens = normalize_answer(prediction).split() ground_truth_tokens =", "for x in g]) if tp == 0: p =", "prediction = result[\"predictions\"] if prediction == gt_answer[qas_id]: num_correct += 1", "answers. gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in dataset.examples} if", "1 chain2stats[inf_chain[qas_id]][0] += 1 correct_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\":", "+= 1 else: comp_acc += 1 # Non-accuracy stats layer_weights", "# Non-accuracy stats layer_weights += result[\"layer_probs\"] layer_entities = {i: set()", "macro / len(relationwise) return micro, macro, relationwise, incorrect def read_results(ground_truth_file,", "metric_max_over_ground_truths(f1_score, prediction, ground_truths) exact_match = 100.0 * exact_match / total", "chain2stats = {ch: [0., 0.] for ch in inf_chain.values()} incorrect_results,", "with codecs.getreader(\"utf-8\")(tf.gfile.GFile(ground_truth_file, \"r\")) as read: data_ = {} for line", "= {} for i in range(layer_weights.shape[0]): layer_entities[i] = set( [ee", "bridge_tot metrics[\"eval/comparison_accuracy\"] = comp_acc / comp_tot metrics[\"analysis/single_accuracy\"] = single_acc /", "score 0.\" % qid print(message) total += 1 continue total", "== 0. and stats[3] > 0.: incorrect.append(key) n += 1", "+= item[\"object\"][\"aliases\"].keys() with codecs.getreader(\"utf-8\")(tf.gfile.GFile(predicted_answers_file, \"r\")) as fin: predictions = json.load(fin)", "tensorflow.compat.v1 as tf PUNCTUATION = frozenset(string.punctuation) FLAGS = flags.FLAGS ##", "np.uint16, np.uint32, np.uint64)): return int(obj) elif isinstance(obj, (np.float_, np.float16, np.float32,", "= no_answer / len(all_predictions) for i in range(layer_weights.shape[0]): metrics[\"analysis/layer_weight_%d\" %", "\"\"\"Strips accents from a piece of text.\"\"\" text = unicodedata.normalize(\"NFD\",", "ans))[0] ranks = np.sort(ranks) ap = 0. cnt = 0.", "if ch not in exclude) def lower(text): return text.lower() return", "= macro / len(relationwise) return micro, macro, relationwise, incorrect def", "fin: predictions = json.load(fin) telemetry, incorrect = [], [] n", "chain2stats[inf_chain[qas_id]][0] += 1 correct_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\": gt_answer[qas_id],", "the License. # Lint as: python3 \"\"\"Evaluate lazy slot filling", "dataset: An object of type OneHopDataset. results: A list of", "comp_acc += 1 # Non-accuracy stats layer_weights += result[\"layer_probs\"] layer_entities", "> 0 else 0. r = tp / float(real_pos) if", "{\"the\", \"a\", \"an\", \"and\", \"\"} for t in answer.strip().lower().split(): tok", "Micro-F %.3f\" % (freq_stats[0], freq_stats[1], freq_stats[2])) def main(_): eval_type =", "text.\"\"\" text = unicodedata.normalize(\"NFD\", text) output = [] for char", "under the License. # Lint as: python3 \"\"\"Evaluate lazy slot", "json.dump( random.sample(correct_results, 100), tf.gfile.Open(output_prediction_file + \".correct\", \"w\"), cls=NumpyEncoder) # Return", "return float(obj) elif isinstance(obj, (np.ndarray,)): # This is the fix", "char in PUNCTUATION: continue cat = unicodedata.category(char) if cat ==", "return \"\".join(ch for ch in text if ch not in", "= {} chain2stats = {ch: [0., 0.] 
for ch in", "import collections import gzip import json import random import re", "from running estimator.predict. name_map: A mapping from prediction indices to", "basic metrics. num_correct = 0. all_predictions = {} for result", "common = collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same = sum(common.values()) if", "= result[\"top_idx\"] scores = result[\"top_vals\"] ans = gt_answer[qas_id] my_type =", "in range(layer_weights.shape[0]): layer_entities[i] = set( [ee for ee in result[\"layer_%d_ent\"", "result[\"qas_ids\"].decode(\"utf-8\") preds = result[\"top_idx\"] scores = result[\"top_vals\"] ans = gt_answer[qas_id]", "/ len(prediction_tokens) recall = 1.0 * num_same / len(ground_truth_tokens) f1", "== \"mention\": gt_answer = gt_mentions else: gt_answer = {ex.qas_id: ex.answer_entity[0]", "/ len(all_predictions) return metrics def normalize_answer(s): \"\"\"Lower text and remove", "_, val in relationwise.items()]) macro = macro / len(relationwise) return", "in gt_answer[qas_id]: num_correct += 1 chain2stats[inf_chain[qas_id]][0] += 1 correct_results.append({ \"qas_id\":", "p = r = f = 0.0 else: p =", "+ r) return np.asarray([p, r, f]) def score(gold, answer): \"\"\"Compares", "metrics[\"analysis/num_entities_%d\" % i] = num_layer_entities[i] / len(all_predictions) metrics[\"analysis/num_new_entities_%d\" % i]", "/ (precision + recall) return f1 def exact_match_score(prediction, ground_truth): \"\"\"Compute", "text strings. output_prediction_file: File to store predictions to. **kwargs: Variable", "%s will receive score 0.\" % qid print(message) total +=", "hop], \"sparse_scores_%d\" % hop: result[\"sparse_scores_%d\" % hop], \"dense_scores_%d\" % hop:", "recall and F score for each relation.\"\"\" rel_to_stats = collections.defaultdict(list)", "range(layer_weights.shape[0])} num_new_entities = {i: 0. for i in range(layer_weights.shape[0])} for", "flags.FLAGS ## Required parameters flags.DEFINE_string(\"ground_truth_file\", None, \"File with ground truth", "for i in range(layer_weights.shape[0]): layer_entities[i] = set( [ee for ee", "recall = 1.0 * num_same / len(ground_truth_tokens) f1 = (2", "num_correct = 0. 
all_predictions = {} chain2stats = {ch: [0.,", "test_hotpot_eval() else: micro, macro, rwise, _ = compute_scores(FLAGS.ground_truth_file, FLAGS.predicted_answers_file) print(\"Micro\",", "= {ex.qas_id: ex.answer_entity for ex in dataset.examples} gt_types = {ex.qas_id:", "def metric_max_over_ground_truths(metric_fn, prediction, ground_truths): scores_for_ground_truths = [] for ground_truth in", "= collections.defaultdict(list) for item in g: rel_to_stats[item[0]].append(item) rel_to_scores = {}", "% hop], \"entity_%d\" % hop: result[\"entity_%d\" % hop], \"sparse_scores_%d\" %", "1 correct_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\": gt_answer[qas_id], \"subject\": gt_entity[qas_id],", "F1 and EM scores.\"\"\" f1 = exact_match = total =", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "} for ch, stats in chain2stats.items(): metrics[\"inference-chains-acc/\" + ch] =", "= 1.0 * num_same / len(prediction_tokens) recall = 1.0 *", "== normalize_answer(ground_truth) def metric_max_over_ground_truths(metric_fn, prediction, ground_truths): scores_for_ground_truths = [] for", "hotpot_eval_fn(dataset, results, name_map, output_prediction_file, **kwargs): \"\"\"Compute evaluation metrics for HotpotQADataset.", "sum([x[-1] for x in g]) if tp == 0: p", "with ground truth answers.\") flags.DEFINE_string(\"predicted_answers_file\", None, \"File with predicted answers", "i, line in enumerate(f): example = json.loads(line) if i ==", "%.3f Micro-R %.3f Micro-F %.3f\" % (rare_stats[0], rare_stats[1], rare_stats[2]), \"freq\",", "for i in range(layer_weights.shape[0])} num_new_entities = {i: 0. for i", "cnt / (rr + 1) if ans: aps.append(ap / len(ans))", "= [ (name_map[str(pred)], str(scores[i])) for i, pred in enumerate(preds) ]", "line in read: item = json.loads(line.strip()) if isinstance(item[\"relation\"], dict): relation", "{} for rel, stats in rel_to_stats.items(): rel_to_scores[rel] = [aprf(stats), len(stats)]", "normalize_answer(ground_truth).split() common = collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same = sum(common.values())", "0: p = r = f = 0.0 else: p", "metrics[\"accuracy\"] = accuracy[10] metrics[\"eval/map\"] = sum(aps) / len(all_predictions) metrics[\"eval/bridge_accuracy\"] =", "% hop in result: correct_results[-1].update({ \"sparse_%d\" % hop: result[\"sparse_%d\" %", "in gold]) answer = simplify(answer) result = np.zeros(4) if gold:", "prediction == gt_answer[qas_id]: num_correct += 1 all_predictions[qas_id] = name_map[str(prediction)] accuracy", "hop in range(3): if \"sparse_%d\" % hop in result: incorrect_results[-1].update({", "metrics = {\"eval/@%d\" % key: accuracy[key] for key in accuracy}", "in rel_to_stats.items(): rel_to_scores[rel] = [aprf(stats), len(stats)] return rel_to_scores def aprf(g):", "num_correct / len(all_predictions) json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) json.dump( random.sample(incorrect_results, 100), tf.gfile.Open(output_prediction_file", "in range(3): if \"sparse_%d\" % hop in result: correct_results[-1].update({ \"sparse_%d\"", "0 for qid, ground_truths in answers.items(): if qid not in", "= {ex.qas_id: ex.question_text for ex in dataset.examples} gt_entity = {ex.qas_id:", "= {\"eval/@%d\" % key: accuracy[key] for key in accuracy} metrics[\"accuracy\"]", "predicted_answers_file) micro = aprf(telemetry) relationwise = aprf_relationwise(telemetry) macro = sum([val[0]", "ground truth answers. 
gt_mentions = {ex.qas_id: ex.answer_mention[0] for ex in", "continue if item[\"object\"] is None: continue if isinstance(item[\"object\"][\"mention\"], dict): data_[item[\"id\"]]", "output_prediction_file, **kwargs): \"\"\"Compute evaluation metrics for HotpotQADataset. Args: dataset: An", "20: 0.} aps = [] no_answer = 0. all_predictions =", "metrics[\"eval/map\"] = sum(aps) / len(all_predictions) metrics[\"eval/bridge_accuracy\"] = bridge_acc / bridge_tot", "len(ground_truth_tokens) f1 = (2 * precision * recall) / (precision", "Version 2.0 (the \"License\"); # you may not use this", "in range(3): if \"sparse_%d\" % hop in result: incorrect_results[-1].update({ \"sparse_%d\"", "mapping from prediction indices to text strings. output_prediction_file: File to", "white_space_fix(remove_articles(remove_punc(lower(s)))) def f1_score(prediction, ground_truth): \"\"\"Compute F1 score.\"\"\" prediction_tokens = normalize_answer(prediction).split()", "else: comp_acc += 1 # Non-accuracy stats layer_weights += result[\"layer_probs\"]", "[] articles = {\"the\", \"a\", \"an\", \"and\", \"\"} for t", "gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples} # Compute", "result[\"sparse_scores_%d\" % hop], \"dense_scores_%d\" % hop: result[\"dense_scores_%d\" % hop], \"mention_scores_%d\"", "= frozenset(string.punctuation) FLAGS = flags.FLAGS ## Required parameters flags.DEFINE_string(\"ground_truth_file\", None,", "\"Unanswered question %s will receive score 0.\" % qid print(message)", "np.asarray([p, r, f]) def score(gold, answer): \"\"\"Compares answer to ground", "if char in PUNCTUATION: continue cat = unicodedata.category(char) if cat", "comp_acc = 0., 0. bridge_tot, comp_tot = 0, 0 single_acc", "if i == 0 and \"header\" in example: continue for", "exclude) def lower(text): return text.lower() return white_space_fix(remove_articles(remove_punc(lower(s)))) def f1_score(prediction, ground_truth):", "recall and F of the given statistics.\"\"\" tp, _, sys_pos,", "0. for i in range(layer_weights.shape[0])} num_new_entities = {i: 0. for", "data_[item[\"id\"]] += [item[\"object\"][\"name\"]] if \"aliases\" in item[\"object\"]: data_[item[\"id\"]] += item[\"object\"][\"aliases\"].keys()", "by applicable law or agreed to in writing, software #", "with relation counts.\") class NumpyEncoder(json.JSONEncoder): \"\"\"Special json encoder for numpy", "tp == 0: p = r = f = 0.0", "rel_to_stats.items(): rel_to_scores[rel] = [aprf(stats), len(stats)] return rel_to_scores def aprf(g): \"\"\"Returns", "single_acc = 0. layer_weights = np.zeros_like(results[0][\"layer_probs\"]) num_layer_entities = {i: 0.", "supervision used in the model. **kwargs: Variable keyword arguments. Returns:", "with tf.gfile.Open(prediction_file) as f: predictions = json.load(f) return predictions def", "accuracy} metrics[\"accuracy\"] = accuracy[10] metrics[\"eval/map\"] = sum(aps) / len(all_predictions) metrics[\"eval/bridge_accuracy\"]", "hop], \"mention_%d\" % hop: result[\"mention_%d\" % hop], \"entity_%d\" % hop:", "isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16,", "= single_acc / len(all_predictions) metrics[\"analysis/no_answers\"] = no_answer / len(all_predictions) for", "recall) / (precision + recall) return f1 def exact_match_score(prediction, ground_truth):", "= 0, 0 single_acc = 0. 
layer_weights = np.zeros_like(results[0][\"layer_probs\"]) num_layer_entities", "result[\"predictions\"], }) for hop in range(3): if \"sparse_%d\" % hop", "single_acc += 1 if ranks.shape[0] == 0: no_answer += 1", "freq_stats, freq_total = np.array([0., 0., 0.]), 0 rare_stats, rare_total =", "str(scores[i])) for i, pred in enumerate(preds) ] tf.logging.info(\"Evaluated %d items\",", "accuracy[key] for key in accuracy} metrics[\"accuracy\"] = accuracy[10] metrics[\"eval/map\"] =", "/= freq_total print( \"Threshold =\", thresh, \"rare\", rare_total, \"Micro-P %.3f", "rare_stats[2]), \"freq\", freq_total, \"Micro-P %.3f Micro-R %.3f Micro-F %.3f\" %", "not None: r2c = json.load(tf.gfile.Open(FLAGS.relation_counts_file)) rare_relation_scores(rwise, r2c) if __name__ ==", "ranks = np.where(np.in1d(preds, ans))[0] ranks = np.sort(ranks) ap = 0.", "return answers def evaluate(answers, predictions, skip_no_answer=False): \"\"\"Compute F1 and EM", "if real_pos > 0 else 0. f = 2 *", "= collections.Counter(prediction_tokens) & collections.Counter( ground_truth_tokens) num_same = sum(common.values()) if num_same", "predictions[qid] exact_match += metric_max_over_ground_truths(exact_match_score, prediction, ground_truths) f1 += metric_max_over_ground_truths(f1_score, prediction,", "rel_to_scores = {} for rel, stats in rel_to_stats.items(): rel_to_scores[rel] =", "* num_same / len(ground_truth_tokens) f1 = (2 * precision *", "not in articles: toks.append(tok) return \"\".join(toks) def rare_relation_scores(relationwise, relation2counts): \"\"\"Print", "for ground_truth in ground_truths: my_score = metric_fn(prediction, ground_truth) scores_for_ground_truths.append(my_score) return", "metrics[\"analysis/single_accuracy\"] = single_acc / len(all_predictions) metrics[\"analysis/no_answers\"] = no_answer / len(all_predictions)", "relation.\"\"\" rel_to_stats = collections.defaultdict(list) for item in g: rel_to_stats[item[0]].append(item) rel_to_scores", "= {i: set() for i in range(layer_weights.shape[0])} all_predictions[qas_id] = {}", "random.sample(correct_results, 100), tf.gfile.Open(output_prediction_file + \".correct\", \"w\"), cls=NumpyEncoder) # Return metrics.", "obj): if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64,", "predicted_answers_file): \"\"\"Read predictions and ground truth and return P, R,", "evaluation metrics for OneHopDataset or TwoHopDataset. 
Args: dataset: An object", "qa in example[\"qas\"]: answers[qa[\"qid\"]] = qa[\"answers\"] f.close() return answers def", "read_answers(dataset_file) predictions = read_predictions(predictions_file) return evaluate(answers, predictions, skip_no_answer) def compute_scores(ground_truth_file,", "+= 1 correct_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\": gt_answer[qas_id], \"subject\":", "applicable law or agreed to in writing, software # distributed", "= strip_accents_and_punct(t) if tok not in articles: toks.append(tok) return \"\".join(toks)", "for i in range(layer_weights.shape[0])} for result in results: qas_id =", "\"accuracy\": accuracy, } for ch, stats in chain2stats.items(): metrics[\"inference-chains-acc/\" +", "}) for hop in range(3): if \"sparse_%d\" % hop in", "any(rr < 10 for rr in ranks): single_acc += 1", "ch in inf_chain.values()} incorrect_results, correct_results = [], [] for result", "as np import tensorflow.compat.v1 as tf PUNCTUATION = frozenset(string.punctuation) FLAGS", "name_map, output_prediction_file, **kwargs): \"\"\"Compute evaluation metrics for HotpotQADataset. Args: dataset:", "normalize_answer(ground_truth) def metric_max_over_ground_truths(metric_fn, prediction, ground_truths): scores_for_ground_truths = [] for ground_truth", "License. # Lint as: python3 \"\"\"Evaluate lazy slot filling results.\"\"\"", "% hop: result[\"dense_%d\" % hop], \"mention_%d\" % hop: result[\"mention_%d\" %", "float(obj) elif isinstance(obj, (np.ndarray,)): # This is the fix return", "else: gt_answer = {ex.qas_id: ex.answer_entity[0] for ex in dataset.examples} #", "= 0. layer_weights = np.zeros_like(results[0][\"layer_probs\"]) num_layer_entities = {i: 0. for", "p = tp / float(sys_pos) if sys_pos > 0 else", "Authors. # # Licensed under the Apache License, Version 2.0", "for ex in dataset.examples} if supervision == \"mention\": gt_answer =", "/ len(all_predictions) metrics[\"analysis/no_answers\"] = no_answer / len(all_predictions) for i in", "= result[\"qas_ids\"] prediction = result[\"predictions\"] if prediction == gt_answer[qas_id]: num_correct", "in dataset.examples} gt_entity = {ex.qas_id: ex.subject_entity[0] for ex in dataset.examples}", "0. bridge_tot, comp_tot = 0, 0 single_acc = 0. layer_weights", "import gzip import json import random import re import string", "= 100.0 * exact_match / total f1 = 100.0 *", "# You may obtain a copy of the License at", "if \"sparse_%d\" % hop in result: incorrect_results[-1].update({ \"sparse_%d\" % hop:", "r = tp / float(real_pos) if real_pos > 0 else", "= 100.0 * f1 / total return {\"exact_match\": exact_match, \"f1\":", "data_[key][:2] stats = score(g, a) telemetry.append([m[0], m[1], g, a, stats])", "input_fns import numpy as np import tensorflow.compat.v1 as tf PUNCTUATION", "telemetry, incorrect = read_results(ground_truth_file, predicted_answers_file) micro = aprf(telemetry) relationwise =", "/ len(relationwise) return micro, macro, relationwise, incorrect def read_results(ground_truth_file, predicted_answers_file):", "\"header\" in example: continue for qa in example[\"qas\"]: answers[qa[\"qid\"]] =", "metrics. 
metrics = { \"accuracy\": accuracy, } for ch, stats", "truth answers.\"\"\" answers = {} f = tf.gfile.Open(gold_file) if gold_file.endswith(\".gz\"):", "simplify(answer): \"\"\"Pre-process answer string.\"\"\" toks = [] articles = {\"the\",", "all_predictions = {} for result in results: qas_id = result[\"qas_ids\"]", "ex.question_text for ex in dataset.examples} gt_entity = {ex.qas_id: ex.subject_entity[0] for", "def aprf(g): \"\"\"Returns precision, recall and F of the given", "(np.int_, np.intc, np.intp, np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32,", "macro, rwise, _ = compute_scores(FLAGS.ground_truth_file, FLAGS.predicted_answers_file) print(\"Micro\", micro) print(\"Macro\", macro)", "= item[\"relation\"][\"wikidata_id\"] elif isinstance(item[\"relation\"], list): relation = ( item[\"relation\"][0][\"wikidata_id\"] +", "\"\"\"Compute F1 score.\"\"\" prediction_tokens = normalize_answer(prediction).split() ground_truth_tokens = normalize_answer(ground_truth).split() common", "to return TP / FP stats.\"\"\" if gold: gold =", "answers.\") flags.DEFINE_string(\"predicted_answers_file\", None, \"File with predicted answers from model.\") flags.DEFINE_string(\"relation_counts_file\",", "[] no_answer = 0. all_predictions = {} bridge_acc, comp_acc =", "in chain2stats.items(): metrics[\"inference-chains-acc/\" + ch] = stats[0] / stats[1] return", "> 0 else 0. f = 2 * p *", "np.int64, np.uint8, np.uint16, np.uint32, np.uint64)): return int(obj) elif isinstance(obj, (np.float_,", "json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) micro, macro, _, _ = compute_scores(dataset.gt_file, output_prediction_file)", "%d items\", len(all_predictions)) accuracy = { key: (num_correct[key] / len(all_predictions))", "metrics. num_correct = 0. all_predictions = {} chain2stats = {ch:", "ap += cnt / (rr + 1) if ans: aps.append(ap", "hop: result[\"entity_scores_%d\" % hop], }) chain2stats[inf_chain[qas_id]][1] += 1 all_predictions[qas_id] =", "text: if char in PUNCTUATION: continue cat = unicodedata.category(char) if", "if gold: result[3] += 1 if answer in gold: result[0]", "len(all_predictions) metrics[\"analysis/no_answers\"] = no_answer / len(all_predictions) for i in range(layer_weights.shape[0]):", "= [] for ground_truth in ground_truths: my_score = metric_fn(prediction, ground_truth)", "micro[2], \"macro-p\": macro[0], \"macro-r\": macro[1], \"macro-f\": macro[2], } return metrics", "= 0. cnt = 0. if any(rr < 10 for", "{} for i in range(layer_weights.shape[0]): layer_entities[i] = set( [ee for", "# Lint as: python3 \"\"\"Evaluate lazy slot filling results.\"\"\" import", "item[\"is_impossible\"]: continue if item[\"object\"] is None: continue if isinstance(item[\"object\"][\"mention\"], dict):", "import app from absl import flags from bert import tokenization", "= {} bridge_acc, comp_acc = 0., 0. 
bridge_tot, comp_tot =", "eval_type = \"hotpot\" if eval_type == \"hotpot\": test_hotpot_eval() else: micro,", "absl import app from absl import flags from bert import", "\"License\"); # you may not use this file except in", "FLAGS.predicted_answers_file) print(\"Micro\", micro) print(\"Macro\", macro) if FLAGS.relation_counts_file is not None:", "1 all_predictions[qas_id] = name_map[str(prediction)] accuracy = num_correct / len(all_predictions) #", "+ item[\"relation\"][1][\"wikidata_id\"]) data_[item[\"id\"]] = [relation, item[\"subject\"][\"wikidata_id\"]] if \"is_impossible\" in item", "if gold: gold = set([simplify(g) for g in gold]) answer", "in g: rel_to_stats[item[0]].append(item) rel_to_scores = {} for rel, stats in", "aprf_relationwise(g): \"\"\"Returns precision, recall and F score for each relation.\"\"\"", "hop in result: incorrect_results[-1].update({ \"sparse_%d\" % hop: result[\"sparse_%d\" % hop],", "g, a, stats]) if stats[0] == 0. and stats[3] >", "num_correct = 0. all_predictions = {} for result in results:", "== 10: if my_type == \"bridge\": bridge_acc += 1 else:", "strings. output_prediction_file: File to store predictions to. supervision: Type of", "} # Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) # Return", "\"bridge\": bridge_tot += 1 else: comp_tot += 1 ranks =", "= metric_fn(prediction, ground_truth) scores_for_ground_truths.append(my_score) return max(scores_for_ground_truths) def read_predictions(prediction_file): with tf.gfile.Open(prediction_file)", "0., 20: 0.} aps = [] no_answer = 0. all_predictions", "exact_match = 100.0 * exact_match / total f1 = 100.0", "for key in [2, 5, 10, 20]: if found or", "[], [] n = 0 for key in data_: if", "0., 0. bridge_tot, comp_tot = 0, 0 single_acc = 0.", "Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) micro, macro, _, _", "= sum(common.values()) if num_same == 0: return 0 precision =", "0 for relation, (stats, _) in relationwise.items(): if relation2counts.get(relation, 0)", "= np.sort(ranks) ap = 0. cnt = 0. if any(rr", "for rr in ranks): single_acc += 1 if ranks.shape[0] ==", "else: incorrect_results.append({ \"qas_id\": result[\"qas_ids\"], \"question\": gt_ques[qas_id], \"answers\": gt_answer[qas_id], \"subject\": gt_entity[qas_id],", "np.int8, np.int16, np.int32, np.int64, np.uint8, np.uint16, np.uint32, np.uint64)): return int(obj)", "key in [2, 5, 10, 20]: if found or np.in1d(ans,", "num_correct } # Compute advanced metrics. json.dump(all_predictions, tf.gfile.Open(output_prediction_file, \"w\")) #", "exclude = set(string.punctuation) return \"\".join(ch for ch in text if" ]
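
# Illustrative usage (editor's sketch, not part of the original module): the
# MRQA-style helpers above can be exercised directly on in-memory dicts; the
# question id and answer strings below are hypothetical.
#
# answers = {"q1": ["Barack Obama", "Obama"]}   # qid -> list of gold answers
# predictions = {"q1": "barack obama."}         # qid -> predicted string
# # normalize_answer() lowercases and strips punctuation and articles, so the
# # prediction matches the first gold answer exactly and evaluate() returns
# # {"exact_match": 100.0, "f1": 100.0}.
# print(evaluate(answers, predictions))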
[ "#pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1) # Mimic the above population to check", "= PB('sed:toy') sed['pop_Nion'] = pop_fcoll.src.Nion sed['pop_Nlw'] = pop_fcoll.src.Nlw # pop_Ex?", "# == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \\ # \"Error in photon", "different SFRD/SED techniques sfrd_pars = {'pop_sfr_model': 'sfrd-func'} sfrd_pars['pop_sfrd'] = pop_fcoll.SFRD", "= ares.populations.GalaxyPopulation(**pars_1) # Mimic the above population to check our", "\"\"\" test_pop_models.py Author: <NAME> Affiliation: UCLA Created on: Fri Jul", "#assert pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6) \\ # == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6),", "sed['pop_ion_src_igm'] = False sed['pop_heat_src_igm'] = False pars_2 = sed +", "+ PB('sed:bpass') pop_fcoll = ares.populations.GalaxyPopulation(**pars_1) #pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1) # Mimic", "= sed + sfrd_pars pop_sfrd = ares.populations.GalaxyPopulation(**pars_2) assert pop_fcoll.SFRD(20.) ==", "'internal' sed = PB('sed:toy') sed['pop_Nion'] = pop_fcoll.src.Nion sed['pop_Nlw'] = pop_fcoll.src.Nlw", "sed['pop_Nlw'] = pop_fcoll.src.Nlw # pop_Ex? sed['pop_ion_src_igm'] = False sed['pop_heat_src_igm'] =", "the above population to check our different SFRD/SED techniques sfrd_pars", "== pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \\ # \"Error in photon luminosity", "pop_Ex? sed['pop_ion_src_igm'] = False sed['pop_heat_src_igm'] = False pars_2 = sed", "sfrd_pars pop_sfrd = ares.populations.GalaxyPopulation(**pars_2) assert pop_fcoll.SFRD(20.) == pop_sfrd.SFRD(20.), \"Error in", "on: Fri Jul 15 15:23:11 PDT 2016 Description: \"\"\" import", "import ares import matplotlib.pyplot as pl PB = ares.util.ParameterBundle def", "Jul 15 15:23:11 PDT 2016 Description: \"\"\" import ares import", "= {'pop_sfr_model': 'sfrd-func'} sfrd_pars['pop_sfrd'] = pop_fcoll.SFRD sfrd_pars['pop_sfrd_units'] = 'internal' sed", "# \"Error in photon luminosity density.\" if __name__ == '__main__':", "= False sed['pop_heat_src_igm'] = False pars_2 = sed + sfrd_pars", "def test(): # Create a simple population pars_1 = PB('pop:fcoll')", "sed['pop_Nion'] = pop_fcoll.src.Nion sed['pop_Nlw'] = pop_fcoll.src.Nlw # pop_Ex? sed['pop_ion_src_igm'] =", "False pars_2 = sed + sfrd_pars pop_sfrd = ares.populations.GalaxyPopulation(**pars_2) assert", "= PB('pop:fcoll') + PB('sed:bpass') pop_fcoll = ares.populations.GalaxyPopulation(**pars_1) #pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1)", "pars_2 = sed + sfrd_pars pop_sfrd = ares.populations.GalaxyPopulation(**pars_2) assert pop_fcoll.SFRD(20.)", "\"Error in SFRD.\" # Check the emissivities too #print(pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2,", "sfrd_pars['pop_sfrd_units'] = 'internal' sed = PB('sed:toy') sed['pop_Nion'] = pop_fcoll.src.Nion sed['pop_Nlw']", "import matplotlib.pyplot as pl PB = ares.util.ParameterBundle def test(): #", "# pop_Ex? 
"""
test_pop_models.py

Author: <NAME>
Affiliation: UCLA
Created on: Fri Jul 15 15:23:11 PDT 2016

Description:

"""

import ares
import matplotlib.pyplot as pl

PB = ares.util.ParameterBundle


def test():
    # Create a simple population
    pars_1 = PB('pop:fcoll') + PB('sed:bpass')
    pop_fcoll = ares.populations.GalaxyPopulation(**pars_1)
    #pop_fcoll_XR = ares.populations.GalaxyPopulation(**pars_1)

    # Mimic the above population to check our different SFRD/SED techniques
    sfrd_pars = {'pop_sfr_model': 'sfrd-func'}
    sfrd_pars['pop_sfrd'] = pop_fcoll.SFRD
    sfrd_pars['pop_sfrd_units'] = 'internal'

    sed = PB('sed:toy')
    sed['pop_Nion'] = pop_fcoll.src.Nion
    sed['pop_Nlw'] = pop_fcoll.src.Nlw
    # pop_Ex?
    sed['pop_ion_src_igm'] = False
    sed['pop_heat_src_igm'] = False

    pars_2 = sed + sfrd_pars

    pop_sfrd = ares.populations.GalaxyPopulation(**pars_2)

    assert pop_fcoll.SFRD(20.) == pop_sfrd.SFRD(20.), "Error in SFRD."

    # Check the emissivities too
    #print(pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
    #print(pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6))
    #assert pop_fcoll.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6) \
    #    == pop_sfrd.PhotonLuminosityDensity(20., Emin=10.2, Emax=13.6), \
    #    "Error in photon luminosity density."


if __name__ == '__main__':
    test()
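
# --- illustrative sketch (added; not part of test_pop_models.py) ---
# The assertion above works because a 'sfrd-func' population defers to the
# user-supplied SFRD callable, so pop_sfrd reproduces pop_fcoll exactly.
# Below is a dependency-free toy version of that callback pattern; every
# name here is a hypothetical stand-in, not the ares API.

def _toy_sfrd(z):
    # stand-in for pop_fcoll.SFRD: any callable of redshift works
    return 1e-2 * (1. + z)**-3.

class _ToyPopulation(object):
    # stand-in for a 'sfrd-func' population: simply defers to the callable
    def __init__(self, sfrd_func):
        self.SFRD = sfrd_func

assert _ToyPopulation(_toy_sfrd).SFRD(20.) == _toy_sfrd(20.)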
[ "import time import hashlib import leancloud from leancloud._compat import to_bytes", "time import hashlib import leancloud from leancloud._compat import to_bytes __author__", "hashlib import leancloud from leancloud._compat import to_bytes __author__ = 'asaka", "to_bytes __author__ = 'asaka <<EMAIL>>' def sign_by_key(timestamp, key): return hashlib.md5(to_bytes('{0}{1}'.format(timestamp,", "leancloud._compat import to_bytes __author__ = 'asaka <<EMAIL>>' def sign_by_key(timestamp, key):", "import leancloud from leancloud._compat import to_bytes __author__ = 'asaka <<EMAIL>>'", "import hashlib import leancloud from leancloud._compat import to_bytes __author__ =", "import to_bytes __author__ = 'asaka <<EMAIL>>' def sign_by_key(timestamp, key): return", "utf-8 import time import hashlib import leancloud from leancloud._compat import", "coding: utf-8 import time import hashlib import leancloud from leancloud._compat", "__author__ = 'asaka <<EMAIL>>' def sign_by_key(timestamp, key): return hashlib.md5(to_bytes('{0}{1}'.format(timestamp, key))).hexdigest()", "from leancloud._compat import to_bytes __author__ = 'asaka <<EMAIL>>' def sign_by_key(timestamp,", "leancloud from leancloud._compat import to_bytes __author__ = 'asaka <<EMAIL>>' def", "# coding: utf-8 import time import hashlib import leancloud from" ]
[ "SIM from chap2.mav_viewer import mav_viewer # from chap2.video_writer import video_writer", "sleep # initialize the visualization VIDEO = False # True==write", "mav_view = mav_viewer() # initialize the mav viewer data_view =", "True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) #", "the mav viewer data_view = data_viewer() # initialize view of", "video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize", "if VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation", "current_wind) mav.update_state(delta, current_wind) # propagate the MAV dynamics #-------update viewer-------------", "from chap2.mav_viewer import mav_viewer # from chap2.video_writer import video_writer from", "delta_a = 0.01#0.0 delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t,", "column vector #-------physical system------------- current_wind = wind.update() # get the", "wind: \", current_wind) mav.update_state(delta, current_wind) # propagate the MAV dynamics", "4 assignment for <NAME>, PUP, 2012 - Update history: 12/27/2018", "estimated states mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO ==", "mav viewer data_view = data_viewer() # initialize view of data", "initialize elements of the architecture wind = wind_simulation(SIM.ts_simulation) mav =", "the simulation time sim_time = SIM.start_time # main simulation loop", "SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t =", "# get the new wind vector # print(\"current wind: \",", "video mav_view = mav_viewer() # initialize the mav viewer data_view", "- Update history: 12/27/2018 - RWB 1/17/2019 - RWB \"\"\"", "initialize the visualization VIDEO = False # True==write video, False==don't", "import video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics", "from time import sleep # initialize the visualization VIDEO =", "write video mav_view = mav_viewer() # initialize the mav viewer", "data_viewer() # initialize view of data plots if VIDEO ==", "history: 12/27/2018 - RWB 1/17/2019 - RWB \"\"\" import sys", "# initialize the visualization VIDEO = False # True==write video,", "mav_viewer() # initialize the mav viewer data_view = data_viewer() #", "view of data plots if VIDEO == True: video =", "wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time", "dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state,", "wind vector # print(\"current wind: \", current_wind) mav.update_state(delta, current_wind) #", "#-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state, #", "- RWB 1/17/2019 - RWB \"\"\" import sys sys.path.append('..') import", "RWB 1/17/2019 - RWB \"\"\" import sys sys.path.append('..') import numpy", "= mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time = SIM.start_time", "SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time +=", "sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e = -0.1", "= 1.0#0.5 delta_a = 0.01#0.0 delta_r = 0.00025#0.005 delta =", "data_view = data_viewer() # initialize view of data plots if", "1.0#0.5 delta_a = 0.01#0.0 
delta_r = 0.00025#0.005 delta = np.array([[delta_e,", "wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation", "\"\"\" mavsimPy - Chapter 4 assignment for <NAME>, PUP, 2012", "states mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO == True:", "while sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e =", "numpy as np import parameters.simulation_parameters as SIM from chap2.mav_viewer import", "the new wind vector # print(\"current wind: \", current_wind) mav.update_state(delta,", "2012 - Update history: 12/27/2018 - RWB 1/17/2019 - RWB", "MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, # estimated states mav.msg_true_state,", "mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state, # true states", "Command-Q to exit...\") while sim_time < SIM.end_time: #-------set control surfaces-------------", "= wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation time", "delta_a = 0.0 # 0.0 delta_r = 0.0 # 0.005", "sim_time = SIM.start_time # main simulation loop # sleep(5) print(\"Press", "-0.1 delta_t = 1.0 # 0.5 delta_a = 0.0 #", "1.0 # 0.5 delta_a = 0.0 # 0.0 delta_r =", "0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to", "\"\"\" import sys sys.path.append('..') import numpy as np import parameters.simulation_parameters", "vector #-------physical system------------- current_wind = wind.update() # get the new", "import numpy as np import parameters.simulation_parameters as SIM from chap2.mav_viewer", "- Chapter 4 assignment for <NAME>, PUP, 2012 - Update", "video, False==don't write video mav_view = mav_viewer() # initialize the", "mav = mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time =", "PUP, 2012 - Update history: 12/27/2018 - RWB 1/17/2019 -", "= 0.0 # 0.0 delta_r = 0.0 # 0.005 delta", "delta_e = -0.1 delta_t = 1.0 # 0.5 delta_a =", "\", current_wind) mav.update_state(delta, current_wind) # propagate the MAV dynamics #-------update", "it a column vector #-------physical system------------- current_wind = wind.update() #", "np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it a", "import data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import wind_simulation", "elements of the architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation)", "transpose to make it a column vector #-------physical system------------- current_wind", "the visualization VIDEO = False # True==write video, False==don't write", "VIDEO = False # True==write video, False==don't write video mav_view", "# plot body of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state,", "MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV", "# initialize view of data plots if VIDEO == True:", "initialize the mav viewer data_view = data_viewer() # initialize view", "delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r = 0.00025#0.005 delta", "False==don't write video mav_view = mav_viewer() # initialize the mav", "= SIM.start_time # main simulation loop # sleep(5) print(\"Press Command-Q", "delta_r]]).T # transpose to make it a column vector #-------physical", "= mav_viewer() # initialize the mav viewer data_view = data_viewer()", "<NAME>, PUP, 2012 - Update history: 12/27/2018 - RWB 1/17/2019", "sleep(5) print(\"Press 
Command-Q to exit...\") while sim_time < SIM.end_time: #-------set", "# 0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose", "= data_viewer() # initialize view of data plots if VIDEO", "0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to", "delta_t, delta_a, delta_r]]).T # transpose to make it a column", "= 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose", "# transpose to make it a column vector #-------physical system-------------", "from chap4.wind_simulation import wind_simulation from time import sleep # initialize", "to make it a column vector else: delta_e = -0.3", "states mav.msg_true_state, # estimated states mav.msg_true_state, # commanded states SIM.ts_simulation)", "mav.msg_true_state, # estimated states mav.msg_true_state, # commanded states SIM.ts_simulation) if", "data plots if VIDEO == True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0,", "for <NAME>, PUP, 2012 - Update history: 12/27/2018 - RWB", "print(\"current wind: \", current_wind) mav.update_state(delta, current_wind) # propagate the MAV", "# sleep(5) print(\"Press Command-Q to exit...\") while sim_time < SIM.end_time:", "video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if VIDEO == True:", "0.5 delta_a = 0.0 # 0.0 delta_r = 0.0 #", "delta_e = -0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r", "== True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video)", "time sim_time = SIM.start_time # main simulation loop # sleep(5)", "#-------increment time------------- sim_time += SIM.ts_simulation if VIDEO == True: video.close()", "<filename>AirplaneLQR/chap4LQR/mavsim_chap4.py<gh_stars>1-10 \"\"\" mavsimPy - Chapter 4 assignment for <NAME>, PUP,", "chap2.video_writer import video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics import", "mav.update_state(delta, current_wind) # propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state)", "# true states mav.msg_true_state, # estimated states mav.msg_true_state, # commanded", "video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics from", "exit...\") while sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e", "# 0.0 delta_r = 0.0 # 0.005 delta = np.array([[delta_e,", "propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body", "get the new wind vector # print(\"current wind: \", current_wind)", "VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if", "make it a column vector #-------physical system------------- current_wind = wind.update()", "0.0 delta_r = 0.0 # 0.005 delta = np.array([[delta_e, delta_t,", "Update history: 12/27/2018 - RWB 1/17/2019 - RWB \"\"\" import", "0.01#0.0 delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T", "assignment for <NAME>, PUP, 2012 - Update history: 12/27/2018 -", "= wind.update() # get the new wind vector # print(\"current", "wind.update() # get the new wind vector # print(\"current wind:", "mav_viewer # from chap2.video_writer import video_writer from chap3.data_viewer import data_viewer", "vector # print(\"current wind: \", current_wind) mav.update_state(delta, current_wind) # propagate", "data_view.update(mav.msg_true_state, # true states 
mav.msg_true_state, # estimated states mav.msg_true_state, #", "1000, 1000), output_rate=SIM.ts_video) # initialize elements of the architecture wind", "delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make", "to exit...\") while sim_time < SIM.end_time: #-------set control surfaces------------- if(sim_time<25):", "# estimated states mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO", "chap2.mav_viewer import mav_viewer # from chap2.video_writer import video_writer from chap3.data_viewer", "delta_r]]).T # transpose to make it a column vector else:", "initialize the simulation time sim_time = SIM.start_time # main simulation", "simulation loop # sleep(5) print(\"Press Command-Q to exit...\") while sim_time", "import wind_simulation from time import sleep # initialize the visualization", "loop # sleep(5) print(\"Press Command-Q to exit...\") while sim_time <", "simulation time sim_time = SIM.start_time # main simulation loop #", "# from chap2.video_writer import video_writer from chap3.data_viewer import data_viewer from", "visualization VIDEO = False # True==write video, False==don't write video", "# True==write video, False==don't write video mav_view = mav_viewer() #", "True==write video, False==don't write video mav_view = mav_viewer() # initialize", "else: delta_e = -0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0", "make it a column vector else: delta_e = -0.3 delta_t", "# print(\"current wind: \", current_wind) mav.update_state(delta, current_wind) # propagate the", "current_wind) # propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) #", "from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import wind_simulation from time", "plot body of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, #", "control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t = 1.0 #", "of the architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) #", "0.0 # 0.0 delta_r = 0.0 # 0.005 delta =", "# initialize the mav viewer data_view = data_viewer() # initialize", "commanded states SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment time-------------", "delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T #", "states SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment time------------- sim_time", "parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer # from chap2.video_writer", "RWB \"\"\" import sys sys.path.append('..') import numpy as np import", "= -0.1 delta_t = 1.0 # 0.5 delta_a = 0.0", "- RWB \"\"\" import sys sys.path.append('..') import numpy as np", "1/17/2019 - RWB \"\"\" import sys sys.path.append('..') import numpy as", "# initialize elements of the architecture wind = wind_simulation(SIM.ts_simulation) mav", "import sleep # initialize the visualization VIDEO = False #", "#-------physical system------------- current_wind = wind.update() # get the new wind", "new wind vector # print(\"current wind: \", current_wind) mav.update_state(delta, current_wind)", "1000), output_rate=SIM.ts_video) # initialize elements of the architecture wind =", "bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements of the", "viewer data_view = data_viewer() # initialize view of data plots", "if(sim_time<25): delta_e = -0.1 delta_t = 1.0 # 0.5 delta_a", "chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation 
import wind_simulation from time import", "output_rate=SIM.ts_video) # initialize elements of the architecture wind = wind_simulation(SIM.ts_simulation)", "initialize view of data plots if VIDEO == True: video", "time import sleep # initialize the visualization VIDEO = False", "np import parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer #", "chap4.wind_simulation import wind_simulation from time import sleep # initialize the", "# initialize the simulation time sim_time = SIM.start_time # main", "data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import wind_simulation from", "sys sys.path.append('..') import numpy as np import parameters.simulation_parameters as SIM", "mav_dynamics from chap4.wind_simulation import wind_simulation from time import sleep #", "SIM.start_time # main simulation loop # sleep(5) print(\"Press Command-Q to", "main simulation loop # sleep(5) print(\"Press Command-Q to exit...\") while", "== True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if VIDEO", "True: video.update(sim_time) #-------increment time------------- sim_time += SIM.ts_simulation if VIDEO ==", "architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize the", "= video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements", "as np import parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer", "as SIM from chap2.mav_viewer import mav_viewer # from chap2.video_writer import", "the architecture wind = wind_simulation(SIM.ts_simulation) mav = mav_dynamics(SIM.ts_simulation) # initialize", "delta_t = 1.0 # 0.5 delta_a = 0.0 # 0.0", "import mav_viewer # from chap2.video_writer import video_writer from chap3.data_viewer import", "from chap2.video_writer import video_writer from chap3.data_viewer import data_viewer from chap4.mav_dynamics", "a column vector #-------physical system------------- current_wind = wind.update() # get", "False # True==write video, False==don't write video mav_view = mav_viewer()", "mavsimPy - Chapter 4 assignment for <NAME>, PUP, 2012 -", "# 0.5 delta_a = 0.0 # 0.0 delta_r = 0.0", "sys.path.append('..') import numpy as np import parameters.simulation_parameters as SIM from", "0.0 # 0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T #", "it a column vector else: delta_e = -0.3 delta_t =", "a column vector else: delta_e = -0.3 delta_t = 1.0#0.5", "column vector else: delta_e = -0.3 delta_t = 1.0#0.5 delta_a", "wind_simulation from time import sleep # initialize the visualization VIDEO", "chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation import", "delta_r = 0.0 # 0.005 delta = np.array([[delta_e, delta_t, delta_a,", "if VIDEO == True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000,", "import sys sys.path.append('..') import numpy as np import parameters.simulation_parameters as", "video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements of", "#-------set control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t = 1.0", "import parameters.simulation_parameters as SIM from chap2.mav_viewer import mav_viewer # from", "= 1.0 # 0.5 delta_a = 0.0 # 0.0 delta_r", "to make it a column vector #-------physical system------------- current_wind =", "12/27/2018 - RWB 1/17/2019 
- RWB \"\"\" import sys sys.path.append('..')", "0, 1000, 1000), output_rate=SIM.ts_video) # initialize elements of the architecture", "import mav_dynamics from chap4.wind_simulation import wind_simulation from time import sleep", "viewer------------- mav_view.update(mav.msg_true_state) # plot body of MAV data_view.update(mav.msg_true_state, # true", "true states mav.msg_true_state, # estimated states mav.msg_true_state, # commanded states", "= False # True==write video, False==don't write video mav_view =", "vector else: delta_e = -0.3 delta_t = 1.0#0.5 delta_a =", "of data plots if VIDEO == True: video = video_writer(video_name=\"chap4_video.avi\",", "= 0.01#0.0 delta_r = 0.00025#0.005 delta = np.array([[delta_e, delta_t, delta_a,", "# main simulation loop # sleep(5) print(\"Press Command-Q to exit...\")", "print(\"Press Command-Q to exit...\") while sim_time < SIM.end_time: #-------set control", "# commanded states SIM.ts_simulation) if VIDEO == True: video.update(sim_time) #-------increment", "transpose to make it a column vector else: delta_e =", "body of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, # estimated", "# transpose to make it a column vector else: delta_e", "the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot body of", "plots if VIDEO == True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0,", "VIDEO == True: video = video_writer(video_name=\"chap4_video.avi\", bounding_box=(0, 0, 1000, 1000),", "< SIM.end_time: #-------set control surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t", "= 0.0 # 0.005 delta = np.array([[delta_e, delta_t, delta_a, delta_r]]).T", "# propagate the MAV dynamics #-------update viewer------------- mav_view.update(mav.msg_true_state) # plot", "from chap3.data_viewer import data_viewer from chap4.mav_dynamics import mav_dynamics from chap4.wind_simulation", "= -0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r =", "-0.3 delta_t = 1.0#0.5 delta_a = 0.01#0.0 delta_r = 0.00025#0.005", "Chapter 4 assignment for <NAME>, PUP, 2012 - Update history:", "mav_dynamics(SIM.ts_simulation) # initialize the simulation time sim_time = SIM.start_time #", "surfaces------------- if(sim_time<25): delta_e = -0.1 delta_t = 1.0 # 0.5", "delta_a, delta_r]]).T # transpose to make it a column vector", "= np.array([[delta_e, delta_t, delta_a, delta_r]]).T # transpose to make it", "current_wind = wind.update() # get the new wind vector #", "system------------- current_wind = wind.update() # get the new wind vector", "mav.msg_true_state, # commanded states SIM.ts_simulation) if VIDEO == True: video.update(sim_time)", "of MAV data_view.update(mav.msg_true_state, # true states mav.msg_true_state, # estimated states" ]
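
# --- illustrative sketch (added; not part of mavsim_chap4.py) ---
# The control input above is packed as a 4x1 column vector in the order
# [delta_e, delta_t, delta_a, delta_r]; a minimal shape check:

import numpy as np

_delta_demo = np.array([[-0.1, 1.0, 0.0, 0.0]]).T  # (1, 4) row -> (4, 1) column
assert _delta_demo.shape == (4, 1)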
<gh_stars>10-100
import os.path as osp
import sys
import numpy as np
import mmcv
from tqdm import tqdm
from functools import cmp_to_key

cur_dir = osp.dirname(osp.abspath(__file__))
PROJ_ROOT = osp.normpath(osp.join(cur_dir, "../../../../"))
sys.path.insert(0, PROJ_ROOT)
from lib.pysixd import inout, misc
from lib.utils.bbox_utils import xyxy_to_xywh
from lib.utils.utils import iprint, wprint

id2obj = {
    1: "002_master_chef_can",  # [1.3360, -0.5000, 3.5105]
    2: "003_cracker_box",  # [0.5575, 1.7005, 4.8050]
    3: "004_sugar_box",  # [-0.9520, 1.4670, 4.3645]
    4: "005_tomato_soup_can",  # [-0.0240, -1.5270, 8.4035]
    5: "006_mustard_bottle",  # [1.2995, 2.4870, -11.8290]
    6: "007_tuna_fish_can",  # [-0.1565, 0.1150, 4.2625]
    7: "008_pudding_box",  # [1.1645, -4.2015, 3.1190]
    8: "009_gelatin_box",  # [1.4460, -0.5915, 3.6085]
    9: "010_potted_meat_can",  # [2.4195, 0.3075, 8.0715]
    10: "011_banana",  # [-18.6730, 12.1915, -1.4635]
    11: "019_pitcher_base",  # [5.3370, 5.8855, 25.6115]
    12: "021_bleach_cleanser",  # [4.9290, -2.4800, -13.2920]
    13: "024_bowl",  # [-0.2270, 0.7950, -2.9675]
    14: "025_mug",  # [-8.4675, -0.6995, -1.6145]
    15: "035_power_drill",  # [9.0710, 20.9360, -2.1190]
    16: "036_wood_block",  # [1.4265, -2.5305, 17.1890]
    17: "037_scissors",  # [7.0535, -28.1320, 0.0420]
    18: "040_large_marker",  # [0.0460, -2.1040, 0.3500]
    19: "051_large_clamp",  # [10.5180, -1.9640, -0.4745]
    20: "052_extra_large_clamp",  # [-0.3950, -10.4130, 0.1620]
    21: "061_foam_brick",  # [-0.0805, 0.0805, -8.2435]
}
obj_num = len(id2obj)
obj2id = {_name: _id for _id, _name in id2obj.items()}

if __name__ == "__main__":
    new_res_path = osp.join(
        PROJ_ROOT,
        "datasets/BOP_DATASETS/ycbv/test/init_poses/",
        "resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json",
    )
    if osp.exists(new_res_path):
        wprint("{} already exists! overriding!".format(new_res_path))

    res_root = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/"
    iter_num_test = 4
    pkl_paths = [
        "01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl",
        "02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl",
        "03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl",
        "04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl",
        "05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl",
        "06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl",
        "07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl",
        "08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl",
        "09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl",
        "10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl",
        "11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl",
        "12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl",
        "13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl",
        "14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl",
        "15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl",
        "16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl",
        "17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl",
        "18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl",
        "19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl",
        "20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl",
        "21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl",
    ]
    obj_names = [obj for obj in obj2id]

    new_res_dict = {}
    for obj_name, pred_name in zip(obj_names, pkl_paths):
        assert obj_name in pred_name, "{} not in {}".format(obj_name, pred_name)
        pred_path = osp.join(res_root, pred_name)
        assert osp.exists(pred_path), pred_path
        iprint(obj_name, pred_path)

        # pkl: scene_im_id key, list of preds
        preds = mmcv.load(pred_path)
        for scene_im_id, pred_list in preds.items():
            for pred in pred_list:
                obj_id = pred["obj_id"]
                score = pred["score"]
                bbox_est = pred["bbox_det_xyxy"]  # xyxy
                bbox_est_xywh = xyxy_to_xywh(bbox_est)
                refined_pose = pred["pose_{}".format(iter_num_test)]
                pose_est = pred["pose_0"]
                cur_new_res = {
                    "obj_id": obj_id,
                    "score": float(score),
                    "bbox_est": bbox_est_xywh.tolist(),
                    "pose_est": pose_est.tolist(),
                    "pose_refine": refined_pose.tolist(),
                }
                if scene_im_id not in new_res_dict:
                    new_res_dict[scene_im_id] = []
                new_res_dict[scene_im_id].append(cur_new_res)

    inout.save_json(new_res_path, new_res_dict)
    iprint()
    iprint("new result path: {}".format(new_res_path))
[ \"01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl\", \"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl\", \"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\", \"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\", \"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl\",", "\"output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/\" iter_num_test = 4 pkl_paths = [ \"01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl\", \"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl\", \"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\",", "[1.1645, -4.2015, 3.1190] 8: \"009_gelatin_box\", # [1.4460, -0.5915, 3.6085] 9:", "for obj_name, pred_name in zip(obj_names, pkl_paths): assert obj_name in pred_name,", "xyxy bbox_est_xywh = xyxy_to_xywh(bbox_est) refined_pose = pred[\"pose_{}\".format(iter_num_test)] pose_est = pred[\"pose_0\"]", "import tqdm from functools import cmp_to_key cur_dir = osp.dirname(osp.abspath(__file__)) PROJ_ROOT", "cur_new_res = { \"obj_id\": obj_id, \"score\": float(score), \"bbox_est\": bbox_est_xywh.tolist(), \"pose_est\":", "preds = mmcv.load(pred_path) for scene_im_id, pred_list in preds.items(): for pred", "9: \"010_potted_meat_can\", # [2.4195, 0.3075, 8.0715] 10: \"011_banana\", # [-18.6730,", "-10.4130, 0.1620] 21: \"061_foam_brick\", # [-0.0805, 0.0805, -8.2435] } obj_num", "os.path as osp import sys import numpy as np import", "7: \"008_pudding_box\", # [1.1645, -4.2015, 3.1190] 8: \"009_gelatin_box\", # [1.4460,", "[7.0535, -28.1320, 0.0420] 18: \"040_large_marker\", # [0.0460, -2.1040, 0.3500] 19:", "_id for _id, _name in id2obj.items()} if __name__ == \"__main__\":", "iter_num_test = 4 pkl_paths = [ \"01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl\", \"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl\", \"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\", \"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\",", "not in new_res_dict: new_res_dict[scene_im_id] = [] new_res_dict[scene_im_id].append(cur_new_res) inout.save_json(new_res_path, new_res_dict) iprint()", "-2.1040, 0.3500] 19: \"051_large_clamp\", # [10.5180, -1.9640, -0.4745] 20: \"052_extra_large_clamp\",", "# [-0.1565, 0.1150, 4.2625] 7: \"008_pudding_box\", # [1.1645, -4.2015, 3.1190]", "] obj_names = [obj for obj in obj2id] new_res_dict =", "-2.9675] 14: \"025_mug\", # [-8.4675, -0.6995, -1.6145] 15: \"035_power_drill\", #", "14: \"025_mug\", # [-8.4675, -0.6995, -1.6145] 15: \"035_power_drill\", # [9.0710,", "= [obj for obj in obj2id] new_res_dict = {} for", "\"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\", \"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl\", 
\"06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl\", \"07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl\", \"08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl\", \"09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl\", \"10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl\", \"11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl\", \"12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl\", \"13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl\",", "pose_est.tolist(), \"pose_refine\": refined_pose.tolist(), } if scene_im_id not in new_res_dict: new_res_dict[scene_im_id]", "[-0.9520, 1.4670, 4.3645] 4: \"005_tomato_soup_can\", # [-0.0240, -1.5270, 8.4035] 5:", "_id, _name in id2obj.items()} if __name__ == \"__main__\": new_res_path =", "[ \"01_02MasterChefCan/inference_model_final_wo_optim-2de2b4e3/ycbv_002_master_chef_can_train_real_uw/results.pkl\", \"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl\", \"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\", \"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\", \"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl\", \"06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl\", \"07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl\", \"08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl\", \"09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl\",", "\"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\", \"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\", \"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl\", \"06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl\", \"07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl\", \"08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl\", \"09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl\", \"10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl\", \"11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl\", \"12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl\",", "obj_id = pred[\"obj_id\"] score = pred[\"score\"] bbox_est = pred[\"bbox_det_xyxy\"] #", "pkl_paths): assert obj_name in pred_name, \"{} not in {}\".format(obj_name, pred_name)", "pred[\"obj_id\"] score = pred[\"score\"] bbox_est = pred[\"bbox_det_xyxy\"] # xyxy bbox_est_xywh", "pred_name, 
\"{} not in {}\".format(obj_name, pred_name) pred_path = osp.join(res_root, pred_name)", "\"14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl\", \"15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl\", \"16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl\", \"17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl\", \"18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl\", \"19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl\", \"20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl\", \"21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl\", ] obj_names", "\"21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl\", ] obj_names = [obj for obj in obj2id] new_res_dict", "obj_name, pred_name in zip(obj_names, pkl_paths): assert obj_name in pred_name, \"{}", "\"datasets/BOP_DATASETS/ycbv/test/init_poses/\", \"resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json\", ) if osp.exists(new_res_path): wprint(\"{} already exists! overriding!\".format(new_res_path)) res_root", "\"02_03CrackerBox/inference_model_final_wo_optim-41082f8a/ycbv_003_cracker_box_train_real_uw/results.pkl\", \"03_04SugarBox/inference_model_final_wo_optim-e09dec3e/ycbv_004_sugar_box_train_real_uw/results.pkl\", \"04_05TomatoSoupCan/inference_model_final_wo_optim-5641f5d3/ycbv_005_tomato_soup_can_train_real_uw/results.pkl\", \"05_06MustardBottle/inference_model_final_wo_optim-6ce23e94/ycbv_006_mustard_bottle_train_real_uw/results.pkl\", \"06_07TunaFishCan/inference_model_final_wo_optim-0a768962/ycbv_007_tuna_fish_can_train_real_uw/results.pkl\", \"07_08PuddingBox/inference_model_final_wo_optim-f2f2cf73/ycbv_008_pudding_box_train_real_uw/results.pkl\", \"08_09GelatinBox/inference_model_final_wo_optim-a303aa1e/ycbv_009_gelatin_box_train_real_uw/results.pkl\", \"09_10PottedMeatCan/inference_model_final_wo_optim-84a56ffd/ycbv_010_potted_meat_can_train_real_uw/results.pkl\", \"10_11Banana/inference_model_final_wo_optim-83947126/ycbv_011_banana_train_real_uw/results.pkl\", \"11_19PitcherBase/inference_model_final_wo_optim-af1c7e62/ycbv_019_pitcher_base_train_real_uw/results.pkl\",", "osp.normpath(osp.join(cur_dir, \"../../../../\")) sys.path.insert(0, PROJ_ROOT) from lib.pysixd import inout, misc from", "-0.5915, 3.6085] 9: \"010_potted_meat_can\", # [2.4195, 0.3075, 8.0715] 10: \"011_banana\",", "\"040_large_marker\", # [0.0460, -2.1040, 0.3500] 19: \"051_large_clamp\", # [10.5180, -1.9640,", "if __name__ == \"__main__\": new_res_path = osp.join( PROJ_ROOT, \"datasets/BOP_DATASETS/ycbv/test/init_poses/\", \"resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json\",", "{ \"obj_id\": obj_id, \"score\": float(score), \"bbox_est\": bbox_est_xywh.tolist(), \"pose_est\": pose_est.tolist(), \"pose_refine\":", "scene_im_id key, list of preds preds = mmcv.load(pred_path) for scene_im_id,", "xyxy_to_xywh from lib.utils.utils import iprint, wprint id2obj = { 1:", "== \"__main__\": new_res_path = osp.join( PROJ_ROOT, 
\"datasets/BOP_DATASETS/ycbv/test/init_poses/\", \"resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json\", ) if", "\"12_21BleachCleanser/inference_model_final_wo_optim-5d740a46/ycbv_021_bleach_cleanser_train_real_uw/results.pkl\", \"13_24Bowl/inference_model_final_wo_optim-f11815d3/ycbv_024_bowl_train_real_uw/results.pkl\", \"14_25Mug/inference_model_final_wo_optim-e4824065/ycbv_025_mug_train_real_uw/results.pkl\", \"15_35PowerDrill/inference_model_final_wo_optim-30d7d1da/ycbv_035_power_drill_train_real_uw/results.pkl\", \"16_36WoodBlock/inference_model_final_wo_optim-fbb38751/ycbv_036_wood_block_train_real_uw/results.pkl\", \"17_37Scissors/inference_model_final_wo_optim-5068c6bb/ycbv_037_scissors_train_real_uw/results.pkl\", \"18_40LargeMarker/inference_model_final_wo_optim-e8d5867c/ycbv_040_large_marker_train_real_uw/results.pkl\", \"19_51LargeClamp/inference_model_final_wo_optim-1ea79b34/ycbv_051_large_clamp_train_real_uw/results.pkl\", \"20_52ExtraLargeClamp/inference_model_final_wo_optim-cb595297/ycbv_052_extra_large_clamp_train_real_uw/results.pkl\", \"21_61FoamBrick/inference_model_final_wo_optim-d3757ca1/ycbv_061_foam_brick_train_real_uw/results.pkl\",", "from lib.utils.bbox_utils import xyxy_to_xywh from lib.utils.utils import iprint, wprint id2obj", "osp.join( PROJ_ROOT, \"datasets/BOP_DATASETS/ycbv/test/init_poses/\", \"resnest50d_online_AugCosyAAEGray_mlBCE_DoubleMask_ycbv_pbr_100e_so_GdrnPbrPose_withYolov4PbrBbox_wDeepimPbrPose_ycbv_train_real_uw.json\", ) if osp.exists(new_res_path): wprint(\"{} already exists!", "pred_name) assert osp.exists(pred_path), pred_path iprint(obj_name, pred_path) # pkl scene_im_id key,", "\"008_pudding_box\", # [1.1645, -4.2015, 3.1190] 8: \"009_gelatin_box\", # [1.4460, -0.5915,", "import iprint, wprint id2obj = { 1: \"002_master_chef_can\", # [1.3360,", "[9.0710, 20.9360, -2.1190] 16: \"036_wood_block\", # [1.4265, -2.5305, 17.1890] 17:", "[10.5180, -1.9640, -0.4745] 20: \"052_extra_large_clamp\", # [-0.3950, -10.4130, 0.1620] 21:", "\"035_power_drill\", # [9.0710, 20.9360, -2.1190] 16: \"036_wood_block\", # [1.4265, -2.5305,", "# pkl scene_im_id key, list of preds preds = mmcv.load(pred_path)", "obj_id, \"score\": float(score), \"bbox_est\": bbox_est_xywh.tolist(), \"pose_est\": pose_est.tolist(), \"pose_refine\": refined_pose.tolist(), }", "new_res_dict[scene_im_id] = [] new_res_dict[scene_im_id].append(cur_new_res) inout.save_json(new_res_path, new_res_dict) iprint() iprint(\"new result path:", "19: \"051_large_clamp\", # [10.5180, -1.9640, -0.4745] 20: \"052_extra_large_clamp\", # [-0.3950,", "new_res_dict: new_res_dict[scene_im_id] = [] new_res_dict[scene_im_id].append(cur_new_res) inout.save_json(new_res_path, new_res_dict) iprint() iprint(\"new result", "obj_name in pred_name, \"{} not in {}\".format(obj_name, pred_name) pred_path =", "pred_name) pred_path = osp.join(res_root, pred_name) assert osp.exists(pred_path), pred_path iprint(obj_name, pred_path)", "18: \"040_large_marker\", # [0.0460, -2.1040, 0.3500] 19: \"051_large_clamp\", # [10.5180,", "pred[\"pose_{}\".format(iter_num_test)] pose_est = pred[\"pose_0\"] cur_new_res = { \"obj_id\": obj_id, \"score\":" ]
<filename>tests/test_app/rest_app/rest_app/services/account_service.py
from flask import abort
from guniflask.context import service

from ..config.jwt_config import jwt_manager


@service
class AccountService:
    accounts = {
        'root': {
            'authorities': ['role_admin'],
            'password': '<PASSWORD>',
        }
    }

    def login(self, username: str, password: str):
        if username not in self.accounts or self.accounts[username]['password'] != password:
            return abort(403)
        account = self.accounts[username]
        token = jwt_manager.create_access_token(authorities=account['authorities'], username=username)
        return {
            'username': username,
            'access_token': token,
        }

    def get(self, username: str):
        if username not in self.accounts:
            return abort(404)
        return {
            'username': username,
            'authorities': self.accounts[username]['authorities']
        }
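
# Usage note (added; hypothetical caller, not part of the test app): a
# controller would typically forward credentials to this service and return
# its dict as JSON. login() aborts with HTTP 403 on a bad password and
# get() aborts with HTTP 404 for an unknown user, e.g.:
#
#   svc = AccountService()
#   payload = svc.login('root', submitted_password)  # {'username', 'access_token'}
#   info = svc.get('root')                           # {'username', 'authorities'}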
<gh_stars>1000+
import pandas as pd

I = ["A", "B", "C", "D", "E"]
oneDigit = pd.Series([1, 2, 3, 4, 5], pd.Index(I))
twoDigit = pd.Series([10, 20, 30, 40, 50], pd.Index(I))

print "addends:"
print oneDigit
print twoDigit
print
print "sum:"
print oneDigit + twoDigit
print

I2 = ["A", "B", "C"]
I3 = ["B", "C", "D", "E"]
X = pd.Series([0, 1, 2], pd.Index(I2))
Y = pd.Series([10, 20, 0, 0], pd.Index(I3))

print "addends:"
print X
print Y
print
print "sum:"
print X + Y
print

A = pd.Series(["hello ", "my ", "name", "is", "brad"])
B = pd.Series(["world", "real"])

print "addends:"
print A
print B
print
print "sum: "
print A + B
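
# Added note: pandas aligns Series on index labels before applying '+', so
# X + Y above is NaN wherever a label appears in only one operand ('A' from
# I2; 'D' and 'E' from I3), and A + B likewise pads the shorter default
# integer index with NaN beyond position 1.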
# Copyright (c) 2008-2020 <NAME>
#

import sys
import os
import re
import logging

import nelly

from .scanner import Scanner
from .program import Program
from .types import *


class Parser(object):
    def __init__(self, include_dirs=[]):
        self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars') ]
        self.pwd = []

        # setup the scanner based on the regular expressions
        self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex'))

        # container for the compiled program
        self.program = Program()

        self.tokens_stack = []
        self.groups_stack = []
        self.group_stack = []
        self.groups = None
        self.group = None

    def Parse(self, grammarFile):
        grammar = grammarFile.read()
        self.pwd.append(os.path.dirname(grammarFile.name))
        logging.debug('Parsing %s (%d bytes)', grammarFile.name, len(grammar))

        self.tokens = self.scanner.Scan(grammar)
        # keep a reference to the tokens for when included files are parsed
        self.tokens_stack.append(self.tokens)

        # iterate over all the tokens
        while self.tokens:
            (token,value,line,col) = self.tokens.Next()

            # handle all the top-level tokens
            if 'nonterminal' == token:
                if value.startswith('::'):
                    value = value[2:]
                self._nonterminal(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                if value.startswith('::'):
                    value = value[2:]
                self._nonterminal(Types.VARTERMINAL, value)
            elif 'include' == token:
                self._include()
            elif 'start_python_code' == token:
                if r'<%pre' == value:
                    self.program.preamble.append(self._python_code('pre'))
                elif r'<%post' == value:
                    self.program.postscript.append(self._python_code('post'))
                else:
                    raise nelly.error('Please specify pre or post in code section')
            elif 'start_comment' == token:
                self._comment()
            else:
                raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value), line, col)

        self.tokens_stack.pop()

        return self.program

    def _nonterminal(self, _type, name):
        # create a new container and add it to the program
        nonterminal = Nonterminal(_type, name)
        self.program.nonterminals[name] = nonterminal

        (token,value,line,col) = self.tokens.Next()

        # parse any optional arguments for the non-terminal
        if 'lparen' == token:
            while True:
                (token,value,line,col) = self.tokens.Next()
                if 'rparen' == token:
                    break
                elif 'comma' == token:
                    continue
                elif 'option' == token:
                    nonterminal.options.append(value)
                    if value == 'start':
                        self.program.start.append(name)
                elif 'decorator' == token:
                    nonterminal.decorators.append(value[1:])
                else:
                    raise nelly.error('Unknown option: %s %s', token, value)
            (token,value,line,col) = self.tokens.Next()

        if 'colon' != token:
            raise nelly.error('Parse error, missing colon at line %d, column %d', line, col)

        # parse zero or more expressions until a semicolon is found
        self._expressions('pipe', 'semicolon', nonterminal)

    def _expressions(self, delimiter, sentinel, nonterminal):
        (token,value,line,col) = self.tokens.Peek()
        expression = Expression((line,col))
        while self.tokens:
            (token,value,line,col) = self.tokens.Next()
            if sentinel == token:
                nonterminal.expressions.append(expression)
                break
            elif delimiter == token:
                nonterminal.expressions.append(expression)
                expression = Expression((line,col))
            elif 'lparen' == token:
                anonterminal = Nonterminal(Types.ANONYMOUS)
                expression.Statement(Types.ANONYMOUS, anonterminal)
                self._expressions('pipe', 'rparen', anonterminal)
            elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:
                quote = self._quote()
                expression.Statement(Types.TERMINAL, quote)
            elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']:
                byte_quote = self._quote()
                expression.Statement(Types.TERMINAL, byte_quote)
            elif 'nonterminal' == token:
                expression.Statement(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                expression.Statement(Types.VARTERMINAL, value)
            elif 'backref' == token:
                expression.Statement(Types.BACKREFERENCE, value)
            elif 'function' == token:
                functerminal = Nonterminal(Types.ANONYMOUS)
                self._expressions('comma', 'rparen', functerminal)
                expression.Statement(Types.FUNCTION, value[1:], functerminal)
            elif 'reference' == token:
                expression.Statement(Types.REFERENCE, value[1:])
            elif 'constant' == token:
                expression.Statement(Types.TERMINAL, value)
            elif 'start_python_code' == token:
                expression.code = self._python_code(nonterminal.name)
            elif 'lbracket' == token:
                try:
                    expression.Operation(Types.SLICE, self._slice())
                except IndexError:
                    raise nelly.error('Applying slice to nothing at line %d, column %d', line, col)
            elif 'lcurley' == token:
                try:
                    expression.Operation(Types.RANGE, self._range())
                except IndexError:
                    raise nelly.error('Applying range to nothing at line %d, column %d', line, col)
            elif 'langle' == token:
                expression.Weight(self._weight())
            elif 'empty' == token:
                pass
            else:
                raise nelly.error('Unhandled token "%s" at line %d, column %d', token, line, col)

    def _quote(self):
        # this will always be the quoted value
        (token,value,line,col) = self.tokens.Next()
        # this will always be the terminal quote
        self.tokens.Next()
        return value

    #
    # Slice a string
    #
    def _slice(self):
        front = None
        back = None
        start = False

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            front = value
            start = True
            (token,value,line,col) = self.tokens.Next()

        if 'rbracket' == token:
            if False == start:
                raise nelly.error('Empty slice at line %d, column %d', line, col)
            return (front,front+1)
        elif 'colon' != token:
            raise nelly.error('Missing colon at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            back = value
            (token,value,line,col) = self.tokens.Next()
        elif 'rbracket' != token:
            raise nelly.error('Missing ] at line %d, column %d', line, col)

        return (front,back)

    #
    # Repeat a range
    #
    def _range(self):
        lower = 0
        upper = 0

        (token,value,line,col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing range at line %d, column %d', line, col)
        lower = value
        upper = value

        (token,value,line,col) = self.tokens.Next()
        if 'rcurley' == token:
            return (lower,upper)
        elif 'comma' != token:
            raise nelly.error('Missing comma at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'constant' == token:
            upper = value
        else:
            raise nelly.error('Missing range at line %d, column %d', line, col)

        (token,value,line,col) = self.tokens.Next()
        if 'rcurley' != token:
            raise nelly.error('Missing } at line %d, column %d', line, col)

        if lower > upper:
            lower,upper = upper,lower

        return (lower,upper)

    def _weight(self):
        (token,value,line,col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing weight at line %d, column %d', line, col)
        # capture the constant before consuming the closing angle bracket
        weight = value

        (token,value,line,col) = self.tokens.Next()
        if 'rangle' != token:
            raise nelly.error('Missing > at %d, column %d', line, col)

        return weight

    #
    # Compile the Python into a code object
    #
    def _python_code(self, name):
        (token,value,line,col) = self.tokens.Next()

        values = [s for s in value.split('\n') if s.strip()] or ['']

        # save the whitespace of the first line
        ws = re.compile(r'\s*').match(values[0]).group()

        # check indentation
        if [s for s in values if not s.startswith(ws)]:
            raise nelly.error('Bad indentation in code block at line %d, column %d', line, col)

        # strip and rejoin the code
        codeblock = '\n'.join(s[len(ws):] for s in values)

        # eat the end_python_code token
        self.tokens.Next()

        try:
            return compile(codeblock, '<'+name+'>', 'exec')
        except SyntaxError as e:
            raise nelly.error('%d: %s: %s', e.lineno, e.msg, repr(e.text))

    #
    # Include other BNF files
    #
    def _include(self):
        (token,value,line,col) = self.tokens.Next()
        # file names are quoted
        if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:
            raise nelly.error('quoted file path expected')
        # get the quoted value
        path = self._quote()

        # try opening the file in each include directory, ignore errors
        content = None
        for include_dir in self.pwd[-1:] + self.include_dirs:
            try:
                fullpath = os.path.join(include_dir, path)
                content = open(fullpath, 'r')
                logging.debug('Including file %s', repr(fullpath))
                break
            except:
                pass

        # if no file was found, throw an error
        if None == content:
            raise nelly.error('Could not load file %s', repr(path))

        # ignore empty file
        if not content:
            return

        # compile it inline
        self.Parse(content)
        self.pwd.pop()

        # restore the current tokens
        self.tokens = self.tokens_stack[-1]

    #
    # Multi-line comments
    #
    def _comment(self):
        # consume and disregard the tokens
        while True:
            (token,value,line,col) = self.tokens.Next()
            if 'start_comment' == token:
                self._comment()
            if 'end_comment' == token:
                return
"self._expressions('pipe', 'semicolon', nonterminal) def _expressions(self, delimiter, sentinel, nonterminal): (token,value,line,col) =", "token: front = value start = True (token,value,line,col) = self.tokens.Next()", "nelly.error('Missing range at line %d, column %d', line, col) lower", "elif 'rbracket' != token: raise nelly.error('Missing ] at line %d,", "empty file if not content: return # compile it inline", "files # def _include(self): (token,value,line,col) = self.tokens.Next() # file names", "import Scanner from .program import Program from .types import *", "values) # eat the end_python_code token self.tokens.Next() try: return compile(codeblock,", "%d, column %d', line, col) (token,ignore,line,col) = self.tokens.Next() if 'rangle'", "and rejoin the code codeblock = '\\n'.join(s[len(ws):] for s in", "[s for s in value.split('\\n') if s.strip()] or [''] #", "if s.strip()] or [''] # save the whitepsace of the", "zero or more expressions until a semicolon is found self._expressions('pipe',", "compile it inline self.Parse(content) self.pwd.pop() # restore the current tokens", "tokens for when included files are parsed self.tokens_stack.append(self.tokens) # iterate", "== token: if r'<%pre' == value: self.program.preamble.append(self._python_code('pre')) elif r'<%post' ==", "(%d bytes)', grammarFile.name, len(grammar)) self.tokens = self.scanner.Scan(grammar) # keep a", "(front,front+1) elif 'colon' != token: raise nelly.error('Missing colon at line", "# container for the compiled program self.program = Program() self.tokens_stack", "value = value[2:] self._nonterminal(Types.NONTERMINAL, value) elif 'varterminal' == token: if", "False (token,value,line,col) = self.tokens.Next() if 'constant' == token: front =", "None start = False (token,value,line,col) = self.tokens.Next() if 'constant' ==", "not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: raise nelly.error('quoted file path expected')", "'exec') except SyntaxError as e: raise nelly.error('%d: %s: %s', e.lineno,", "lower > upper: lower,upper = upper,lower return (lower,upper) def _weight(self):", "# # (c) 2008-2020 <NAME> # import sys import os", "[] self.groups_stack = [] self.group_stack = [] self.groups = None", "not content: return # compile it inline self.Parse(content) self.pwd.pop() #", "e: raise nelly.error('%d: %s: %s', e.lineno, e.msg, repr(e.text)) # #", "# eat the end_python_code token self.tokens.Next() try: return compile(codeblock, '<'+name+'>',", "self.tokens.Next() if 'constant' != token: raise nelly.error('Missing range at line", "= None def Parse(self, grammarFile): grammar = grammarFile.read() self.pwd.append(os.path.dirname(grammarFile.name)) logging.debug('Parsing", "value) (token,value,line,col) = self.tokens.Next() if 'colon' != token: raise nelly.error('Parse", "nelly from .scanner import Scanner from .program import Program from", "len(grammar)) self.tokens = self.scanner.Scan(grammar) # keep a reference to the", "= self.tokens.Next() if 'rbracket' == token: if False == start:", "= self.tokens.Next() if sentinel == token: nonterminal.expressions.append(expression) break elif delimiter", "def _quote(self): # this will always be the quoted value", "# def _slice(self): front = None back = None start", "was found, throw an error if None == content: raise", "self.Parse(content) self.pwd.pop() # restore the current tokens self.tokens = self.tokens_stack[-1]", "the current tokens self.tokens = self.tokens_stack[-1] # # Multi-line comments", "token: expression.Statement(Types.BACKREFERENCE, 
value) elif 'function' == token: functerminal = Nonterminal(Types.ANONYMOUS)", "column %d', line, col) if lower > upper: lower,upper =", "# consume and disregard the tokens while True: (token,value,line,col) =", "'constant' == token: back = value (token,value,line,col) = self.tokens.Next() elif", "(token,value,line,col) = self.tokens.Next() values = [s for s in value.split('\\n')", "%d', line, col) # parse zero or more expressions until", "disregard the tokens while True: (token,value,line,col) = self.tokens.Next() if 'start_comment'", "col) # parse zero or more expressions until a semicolon", "name) self.program.nonterminals[name] = nonterminal (token,value,line,col) = self.tokens.Next() # parse any", "== token: front = value start = True (token,value,line,col) =", "bytes)', grammarFile.name, len(grammar)) self.tokens = self.scanner.Scan(grammar) # keep a reference", "self.pwd = [] # setup the scanner based on the", "for the non-terminal if 'lparen' == token: while True: (token,value,line,col)", "elif 'lparen' == token: anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe',", "token: if False == start: raise nelly.error('Empty slice at line", "self.tokens_stack = [] self.groups_stack = [] self.group_stack = [] self.groups", "specify pre or post in code section') elif 'start_comment' ==", "back = None start = False (token,value,line,col) = self.tokens.Next() if", "%d', line, col) if lower > upper: lower,upper = upper,lower", "col) (token,ignore,line,col) = self.tokens.Next() if 'rangle' != token: raise nelly.error('Missing", "at line %d, column %d', line, col) # strip and", "= self.tokens_stack[-1] # # Multi-line comments # def _comment(self): #", "<NAME> # import sys import os import re import logging", "non-terminal if 'lparen' == token: while True: (token,value,line,col) = self.tokens.Next()", "= os.path.join(include_dir, path) content = open(fullpath, 'r') logging.debug('Including file %s',", "'start_double_bytes', 'start_triple_bytes']: byte_quote = self._quote() expression.Statement(Types.TERMINAL, byte_quote) elif 'nonterminal' ==", "upper: lower,upper = upper,lower return (lower,upper) def _weight(self): (token,value,line,col) =", "content: return # compile it inline self.Parse(content) self.pwd.pop() # restore", "'rbracket' == token: if False == start: raise nelly.error('Empty slice", "None for include_dir in self.pwd[-1:] + self.include_dirs: try: fullpath =", "name): # create a new container and add it to", "line, col) self.tokens_stack.pop() return self.program def _nonterminal(self, _type, name): #", "col) (token,value,line,col) = self.tokens.Next() if 'rcurley' != token: raise nelly.error('Missing", "self.tokens.Next() if 'colon' != token: raise nelly.error('Parse error, missing colon", "nonterminal (token,value,line,col) = self.tokens.Next() # parse any optional arguments for", "value # # Compile the Python into a code object", "self._quote() expression.Statement(Types.TERMINAL, quote) elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']: byte_quote", "at %d, column %d', line, col) return value # #", "file %s', repr(fullpath)) break except: continue # if no file", "self.tokens.Next() # handle all the top-level tokens if 'nonterminal' ==", "if 'lparen' == token: while True: (token,value,line,col) = self.tokens.Next() if", "found self._expressions('pipe', 'semicolon', nonterminal) def _expressions(self, delimiter, sentinel, nonterminal): (token,value,line,col)", "import re import 
logging import nelly from .scanner import Scanner", "for the compiled program self.program = Program() self.tokens_stack = []", "= [s for s in value.split('\\n') if s.strip()] or ['']", "compiled program self.program = Program() self.tokens_stack = [] self.groups_stack =", "'grammars') ] self.pwd = [] # setup the scanner based", "if 'rbracket' == token: if False == start: raise nelly.error('Empty", "!= token: raise nelly.error('Missing comma at line %d, column %d',", "Nonterminal(_type, name) self.program.nonterminals[name] = nonterminal (token,value,line,col) = self.tokens.Next() # parse", "raise nelly.error('Missing ] at line %d, column %d', line, col)", "= [] self.groups_stack = [] self.group_stack = [] self.groups =", "= value upper = value (token,value,line,col) = self.tokens.Next() if 'rcurley'", "token: continue elif 'option' == token: nonterminal.options.append(value) if value ==", "!= token: raise nelly.error('Missing } at line %d, column %d',", "repr(value), line, col) self.tokens_stack.pop() return self.program def _nonterminal(self, _type, name):", "self.tokens: (token,value,line,col) = self.tokens.Next() if sentinel == token: nonterminal.expressions.append(expression) break", "all the tokens while self.tokens: (token,value,line,col) = self.tokens.Next() # handle", "expression.Statement(Types.TERMINAL, value) elif 'start_python_code' == token: expression.code = self._python_code(nonterminal.name) elif", "repr(fullpath)) break except: continue # if no file was found,", "# iterate over all the tokens while self.tokens: (token,value,line,col) =", "= None for include_dir in self.pwd[-1:] + self.include_dirs: try: fullpath", "the tokens while self.tokens: (token,value,line,col) = self.tokens.Next() # handle all", "handle all the top-level tokens if 'nonterminal' == token: if", "token: back = value (token,value,line,col) = self.tokens.Next() elif 'rbracket' !=", "value (token,value,line,col) = self.tokens.Next() elif 'rbracket' != token: raise nelly.error('Missing", "def _python_code(self, name): (token,value,line,col) = self.tokens.Next() values = [s for", "line, col) (token,ignore,line,col) = self.tokens.Next() if 'rangle' != token: raise", "column %d', line, col) return (front,back) # # Repeat a", "elif 'varterminal' == token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.VARTERMINAL,", "if 'colon' != token: raise nelly.error('Parse error, missing colon at", "at line %d, column %d', line, col) # parse zero", "elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif 'constant' == token:", "== token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.VARTERMINAL, value) elif", "add it to the program nonterminal = Nonterminal(_type, name) self.program.nonterminals[name]", "nonterminal): (token,value,line,col) = self.tokens.Peek() expression = Expression((line,col)) while self.tokens: (token,value,line,col)", "self.tokens.Next() if 'constant' == token: front = value start =", "value start = True (token,value,line,col) = self.tokens.Next() if 'rbracket' ==", "%d, column %d', line, col) # strip and rejoin the", "col) if lower > upper: lower,upper = upper,lower return (lower,upper)", "return # compile it inline self.Parse(content) self.pwd.pop() # restore the", "= None back = None start = False (token,value,line,col) =", "value) elif 'include' == token: self._include() elif 'start_python_code' == token:", "%s', token, value) (token,value,line,col) = self.tokens.Next() if 'colon' != token:", "== token: back = value 
(token,value,line,col) = self.tokens.Next() elif 'rbracket'", "(lower,upper) def _weight(self): (token,value,line,col) = self.tokens.Next() if 'constant' != token:", "column %d', line, col) (token,ignore,line,col) = self.tokens.Next() if 'rangle' !=", "'rcurley' != token: raise nelly.error('Missing } at line %d, column", "nelly.error('Missing > at %d, column %d', line, col) return value", "if no file was found, throw an error if None", "upper,lower return (lower,upper) def _weight(self): (token,value,line,col) = self.tokens.Next() if 'constant'", "error if None == content: raise nelly.error('Could not load file", "= Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen', anonterminal) elif token in", "self._include() elif 'start_python_code' == token: if r'<%pre' == value: self.program.preamble.append(self._python_code('pre'))", "elif delimiter == token: nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen'", "self.program def _nonterminal(self, _type, name): # create a new container", "'decorator' == token: nonterminal.decorators.append(value[1:]) else: raise nelly.error('Unknown option: %s %s',", "(token,ignore,line,col) = self.tokens.Next() if 'rangle' != token: raise nelly.error('Missing >", "code section') elif 'start_comment' == token: self._comment() else: raise nelly.error('Unhandled", "%d', line, col) (token,value,line,col) = self.tokens.Next() if 'constant' == token:", "(token,value,line,col) = self.tokens.Next() # parse any optional arguments for the", "self.tokens.Next() values = [s for s in value.split('\\n') if s.strip()]", "block at line %d, column %d', line, col) # strip", "the compiled program self.program = Program() self.tokens_stack = [] self.groups_stack", "self.tokens = self.tokens_stack[-1] # # Multi-line comments # def _comment(self):", "from .program import Program from .types import * class Parser(object):", "col) return (front,back) # # Repeat a range # def", "# Slice a string # def _slice(self): front = None", "= value (token,value,line,col) = self.tokens.Next() if 'rcurley' == token: return", "quote self.tokens.Next() return value # # Slice a string #", "setup the scanner based on the regular expressions self.scanner =", "'lparen' == token: while True: (token,value,line,col) = self.tokens.Next() if 'rparen'", "line %d, column %d', line, col) (token,ignore,line,col) = self.tokens.Next() if", "line, col) (token,value,line,col) = self.tokens.Next() if 'rcurley' != token: raise", "value: self.program.preamble.append(self._python_code('pre')) elif r'<%post' == value: self.program.postscript.append(self._python_code('post')) else: raise nelly.error('Please", "try: expression.Operation(Types.RANGE, self._range()) except IndexError: raise nelly.error('Applying range to nothing", "tokens while self.tokens: (token,value,line,col) = self.tokens.Next() # handle all the", "IndexError: raise nelly.error('Applying range to nothing at line %d, column", "each include directory, ignore errors content = None for include_dir", "# compile it inline self.Parse(content) self.pwd.pop() # restore the current", "= '\\n'.join(s[len(ws):] for s in values) # eat the end_python_code", "'start_python_code' == token: expression.code = self._python_code(nonterminal.name) elif 'lbracket' == token:", "nelly.error('Applying range to nothing at line %d, column %d', line,", "(token,value,line,col) = self.tokens.Next() if 'colon' != token: raise nelly.error('Parse error,", "nelly.error('Missing colon 
at line %d, column %d', line, col) (token,value,line,col)", "line, col) (token,value,line,col) = self.tokens.Next() if 'constant' == token: upper", "raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value), line, col)", "range at line %d, column %d', line, col) (token,value,line,col) =", "# import sys import os import re import logging import", "consume and disregard the tokens while True: (token,value,line,col) = self.tokens.Next()", "the quoted value (token,value,line,col) = self.tokens.Next() # this will always", "the Python into a code object # def _python_code(self, name):", "elif r'<%post' == value: self.program.postscript.append(self._python_code('post')) else: raise nelly.error('Please specify pre", "at line %d, column %d', line, col) return (front,front+1) elif", "== start: raise nelly.error('Empty slice at line %d, column %d',", "Multi-line comments # def _comment(self): # consume and disregard the", "if False == start: raise nelly.error('Empty slice at line %d,", "# file names are quoted if token not in ['start_single_quote',", "upper = 0 (token,value,line,col) = self.tokens.Next() if 'constant' != token:", "except SyntaxError as e: raise nelly.error('%d: %s: %s', e.lineno, e.msg,", "'rbracket' != token: raise nelly.error('Missing ] at line %d, column", "nelly.error('Missing weight at line %d, column %d', line, col) (token,ignore,line,col)", "= re.compile(r'\\s*').match(values[0]).group() # check indentation if [s for s in", "sys import os import re import logging import nelly from", "'comma' == token: continue elif 'option' == token: nonterminal.options.append(value) if", "%d', line, col) return (front,back) # # Repeat a range", "token: raise nelly.error('Missing colon at line %d, column %d', line,", "nelly.error('quoted file path expected') # get the quoted value path", "\"%s\" at line %d, column %d', token, line, col) def", "the terminal quote self.tokens.Next() return value # # Slice a", "= value (token,value,line,col) = self.tokens.Next() elif 'rbracket' != token: raise", "'nonterminal' == token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.NONTERMINAL, value)", "= Scanner(os.path.join(nelly.root, 'rules.lex')) # container for the compiled program self.program", "(token,value,line,col) = self.tokens.Next() if 'constant' == token: upper = value", "raise nelly.error('Missing } at line %d, column %d', line, col)", "_nonterminal(self, _type, name): # create a new container and add", "'backref' == token: expression.Statement(Types.BACKREFERENCE, value) elif 'function' == token: functerminal", "sentinel == token: nonterminal.expressions.append(expression) break elif delimiter == token: nonterminal.expressions.append(expression)", "the file in each include directory, ignore errors content =", "self.pwd[-1:] + self.include_dirs: try: fullpath = os.path.join(include_dir, path) content =", "nonterminal.expressions.append(expression) break elif delimiter == token: nonterminal.expressions.append(expression) expression = Expression((line,col))", "value) elif 'varterminal' == token: expression.Statement(Types.VARTERMINAL, value) elif 'backref' ==", "try opening the file in each include directory, ignore errors", "'start_triple_quote']: quote = self._quote() expression.Statement(Types.TERMINAL, quote) elif token in ['start_single_bytes',", "line, col) return (front,back) # # Repeat a range #", "== value: self.program.postscript.append(self._python_code('post')) else: raise nelly.error('Please specify pre or post", "return value # # Slice a string # def 
_slice(self):", "anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen', anonterminal) elif token", "over all the tokens while self.tokens: (token,value,line,col) = self.tokens.Next() #", "if value.startswith('::'): value = value[2:] self._nonterminal(Types.VARTERMINAL, value) elif 'include' ==", "'constant' == token: upper = value else: raise nelly.error('Missing range", "%d, column %d', line, col) elif 'lcurley' == token: try:", "column %d', line, col) # parse zero or more expressions", "] self.pwd = [] # setup the scanner based on", "_expressions(self, delimiter, sentinel, nonterminal): (token,value,line,col) = self.tokens.Peek() expression = Expression((line,col))", "= self.tokens.Next() elif 'rbracket' != token: raise nelly.error('Missing ] at", "anonterminal) elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: quote = self._quote()", "# (c) 2008-2020 <NAME> # import sys import os import", "nelly.error('Unhandled token \"%s\" at line %d, column %d', token, line,", "parsed self.tokens_stack.append(self.tokens) # iterate over all the tokens while self.tokens:", "comma at line %d, column %d', line, col) (token,value,line,col) =", "'start_comment' == token: self._comment() else: raise nelly.error('Unhandled %s %s at", "_comment(self): # consume and disregard the tokens while True: (token,value,line,col)", "strip and rejoin the code codeblock = '\\n'.join(s[len(ws):] for s", "byte_quote) elif 'nonterminal' == token: expression.Statement(Types.NONTERMINAL, value) elif 'varterminal' ==", "return value # # Compile the Python into a code", "= [] # setup the scanner based on the regular", "== token: break elif 'comma' == token: continue elif 'option'", "except IndexError: raise nelly.error('Applying slice to nothing at line %d,", "raise nelly.error('Empty slice at line %d, column %d', line, col)", "elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']: byte_quote = self._quote() expression.Statement(Types.TERMINAL,", "expression.Statement(Types.REFERENCE, value[1:]) elif 'constant' == token: expression.Statement(Types.TERMINAL, value) elif 'start_python_code'", "for include_dir in self.pwd[-1:] + self.include_dirs: try: fullpath = os.path.join(include_dir,", "line %d, column %d', line, col) if lower > upper:", "def _slice(self): front = None back = None start =", "at line %d, column %d', line, col) if lower >", "(token,value,line,col) = self.tokens.Next() if 'rparen' == token: break elif 'comma'", "self.include_dirs: try: fullpath = os.path.join(include_dir, path) content = open(fullpath, 'r')", "(token,value,line,col) = self.tokens.Next() if 'rcurley' == token: return (lower,upper) elif", "expression.code = self._python_code(nonterminal.name) elif 'lbracket' == token: try: expression.Operation(Types.SLICE, self._slice())", "token: try: expression.Operation(Types.SLICE, self._slice()) except IndexError: raise nelly.error('Applying slice to", "= [] self.group_stack = [] self.groups = None self.group =", "nonterminal.decorators.append(value[1:]) else: raise nelly.error('Unknown option: %s %s', token, value) (token,value,line,col)", "expected') # get the quoted value path = self._quote() #", "value # # Slice a string # def _slice(self): front", "Program() self.tokens_stack = [] self.groups_stack = [] self.group_stack = []", "None self.group = None def Parse(self, grammarFile): grammar = grammarFile.read()", "ws = re.compile(r'\\s*').match(values[0]).group() # check 
indentation if [s for s", "= nonterminal (token,value,line,col) = self.tokens.Next() # parse any optional arguments", "col) (token,value,line,col) = self.tokens.Next() if 'constant' == token: back =", "value == 'start': self.program.start.append(name) elif 'decorator' == token: nonterminal.decorators.append(value[1:]) else:", "!= token: raise nelly.error('Missing weight at line %d, column %d',", "Compile the Python into a code object # def _python_code(self,", "post in code section') elif 'start_comment' == token: self._comment() else:", "from .scanner import Scanner from .program import Program from .types", "[s for s in values if not s.startswith(ws)]: raise nelly.error('Bad", "s.startswith(ws)]: raise nelly.error('Bad indentation in code block at line %d,", "token: self._comment() else: raise nelly.error('Unhandled %s %s at %d:%d', token,", "'lcurley' == token: try: expression.Operation(Types.RANGE, self._range()) except IndexError: raise nelly.error('Applying", "import os import re import logging import nelly from .scanner", "token \"%s\" at line %d, column %d', token, line, col)", "grammarFile): grammar = grammarFile.read() self.pwd.append(os.path.dirname(grammarFile.name)) logging.debug('Parsing %s (%d bytes)', grammarFile.name,", "'constant' != token: raise nelly.error('Missing range at line %d, column", "] at line %d, column %d', line, col) return (front,back)", "!= token: raise nelly.error('Missing ] at line %d, column %d',", "line, col) # strip and rejoin the code codeblock =", "open(fullpath, 'r') logging.debug('Including file %s', repr(fullpath)) break except: continue #", "include_dirs=[]): self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars') ] self.pwd", "elif 'include' == token: self._include() elif 'start_python_code' == token: if", "into a code object # def _python_code(self, name): (token,value,line,col) =", "# keep a reference to the tokens for when included", "%d', line, col) (token,ignore,line,col) = self.tokens.Next() if 'rangle' != token:", "for when included files are parsed self.tokens_stack.append(self.tokens) # iterate over", "def _range(self): lower = 0 upper = 0 (token,value,line,col) =", "optional arguments for the non-terminal if 'lparen' == token: while", "raise nelly.error('Missing colon at line %d, column %d', line, col)", "ignore empty file if not content: return # compile it", "if 'rparen' == token: break elif 'comma' == token: continue", "Expression((line,col)) while self.tokens: (token,value,line,col) = self.tokens.Next() if sentinel == token:", "'start': self.program.start.append(name) elif 'decorator' == token: nonterminal.decorators.append(value[1:]) else: raise nelly.error('Unknown", "nothing at line %d, column %d', line, col) elif 'langle'", "== content: raise nelly.error('Could not load file %s', repr(path)) #", "column %d', line, col) return (front,front+1) elif 'colon' != token:", "%d, column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'rcurley'", "s in value.split('\\n') if s.strip()] or [''] # save the", "token: try: expression.Operation(Types.RANGE, self._range()) except IndexError: raise nelly.error('Applying range to", "grammarFile.read() self.pwd.append(os.path.dirname(grammarFile.name)) logging.debug('Parsing %s (%d bytes)', grammarFile.name, len(grammar)) self.tokens =", "'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif 'constant' == token: expression.Statement(Types.TERMINAL,", "!= token: raise nelly.error('Parse error, missing colon at line %d,", "repr(path)) # ignore 
empty file if not content: return #", "import logging import nelly from .scanner import Scanner from .program", "value.startswith('::'): value = value[2:] self._nonterminal(Types.VARTERMINAL, value) elif 'include' == token:", "else: raise nelly.error('Unknown option: %s %s', token, value) (token,value,line,col) =", "tokens if 'nonterminal' == token: if value.startswith('::'): value = value[2:]", "the first line ws = re.compile(r'\\s*').match(values[0]).group() # check indentation if", "indentation in code block at line %d, column %d', line,", "range to nothing at line %d, column %d', line, col)", "class Parser(object): def __init__(self, include_dirs=[]): self.include_dirs = include_dirs + [", "values if not s.startswith(ws)]: raise nelly.error('Bad indentation in code block", "= self.tokens.Next() if 'start_comment' == token: self._comment() if 'end_comment' ==", "nelly.error('Missing comma at line %d, column %d', line, col) (token,value,line,col)", "elif 'start_python_code' == token: if r'<%pre' == value: self.program.preamble.append(self._python_code('pre')) elif", "# parse any optional arguments for the non-terminal if 'lparen'", "= self.tokens.Next() if 'rcurley' == token: return (lower,upper) elif 'comma'", "Parser(object): def __init__(self, include_dirs=[]): self.include_dirs = include_dirs + [ os.path.join(nelly.root,", "token, value) (token,value,line,col) = self.tokens.Next() if 'colon' != token: raise", ".types import * class Parser(object): def __init__(self, include_dirs=[]): self.include_dirs =", "regular expressions self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex')) # container for the", "load file %s', repr(path)) # ignore empty file if not", "the top-level tokens if 'nonterminal' == token: if value.startswith('::'): value", "at %d:%d', token, repr(value), line, col) self.tokens_stack.pop() return self.program def", "nelly.error('Unknown option: %s %s', token, value) (token,value,line,col) = self.tokens.Next() if", "# parse zero or more expressions until a semicolon is", "logging.debug('Including file %s', repr(fullpath)) break except: continue # if no", "# Repeat a range # def _range(self): lower = 0", "semicolon is found self._expressions('pipe', 'semicolon', nonterminal) def _expressions(self, delimiter, sentinel,", "None back = None start = False (token,value,line,col) = self.tokens.Next()", "nelly.error('Missing range at line %d, column %d', line, col) (token,value,line,col)", "(c) 2008-2020 <NAME> # import sys import os import re", "'start_triple_quote']: raise nelly.error('quoted file path expected') # get the quoted", "# get the quoted value path = self._quote() # try", "while True: (token,value,line,col) = self.tokens.Next() if 'rparen' == token: break", "token: raise nelly.error('Missing range at line %d, column %d', line,", "line, col) # parse zero or more expressions until a", "pass else: raise nelly.error('Unhandled token \"%s\" at line %d, column", "e.lineno, e.msg, repr(e.text)) # # Include other BNF files #", "# # Include other BNF files # def _include(self): (token,value,line,col)", "col) # strip and rejoin the code codeblock = '\\n'.join(s[len(ws):]", "+ [ os.path.join(nelly.root, 'grammars') ] self.pwd = [] # setup", "== token: while True: (token,value,line,col) = self.tokens.Next() if 'rparen' ==", "expression = Expression((line,col)) while self.tokens: (token,value,line,col) = self.tokens.Next() if sentinel", "self.program = Program() self.tokens_stack = [] self.groups_stack = [] self.group_stack", "nelly.error('Unhandled %s %s at %d:%d', 
token, repr(value), line, col) self.tokens_stack.pop()", "['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']: byte_quote = self._quote() expression.Statement(Types.TERMINAL, byte_quote) elif 'nonterminal'", "(token,value,line,col) = self.tokens.Next() if 'constant' == token: back = value", "in code block at line %d, column %d', line, col)", "= self._quote() # try opening the file in each include", "value[1:]) elif 'constant' == token: expression.Statement(Types.TERMINAL, value) elif 'start_python_code' ==", "and disregard the tokens while True: (token,value,line,col) = self.tokens.Next() if", "(token,value,line,col) = self.tokens.Peek() expression = Expression((line,col)) while self.tokens: (token,value,line,col) =", "Slice a string # def _slice(self): front = None back", "col) elif 'lcurley' == token: try: expression.Operation(Types.RANGE, self._range()) except IndexError:", "== token: nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen' == token:", "'start_double_quote', 'start_triple_quote']: raise nelly.error('quoted file path expected') # get the", "grammarFile.name, len(grammar)) self.tokens = self.scanner.Scan(grammar) # keep a reference to", "self.tokens.Next() # file names are quoted if token not in", "%s', repr(fullpath)) break except: continue # if no file was", "self.tokens.Next() if 'rangle' != token: raise nelly.error('Missing > at %d,", "= self._python_code(nonterminal.name) elif 'lbracket' == token: try: expression.Operation(Types.SLICE, self._slice()) except", "self.tokens.Next() if 'rbracket' == token: if False == start: raise", "opening the file in each include directory, ignore errors content", "True: (token,value,line,col) = self.tokens.Next() if 'start_comment' == token: self._comment() if", "def _include(self): (token,value,line,col) = self.tokens.Next() # file names are quoted", "> upper: lower,upper = upper,lower return (lower,upper) def _weight(self): (token,value,line,col)", "in each include directory, ignore errors content = None for", "at line %d, column %d', token, line, col) def _quote(self):", "path) content = open(fullpath, 'r') logging.debug('Including file %s', repr(fullpath)) break", "expressions self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex')) # container for the compiled", "elif 'empty' == token: pass else: raise nelly.error('Unhandled token \"%s\"", "self._comment() else: raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value),", "# Include other BNF files # def _include(self): (token,value,line,col) =", "will always be the quoted value (token,value,line,col) = self.tokens.Next() #", "nonterminal) def _expressions(self, delimiter, sentinel, nonterminal): (token,value,line,col) = self.tokens.Peek() expression", "be the quoted value (token,value,line,col) = self.tokens.Next() # this will", "value[1:], functerminal) elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif 'constant'", "(token,value,line,col) = self.tokens.Next() # handle all the top-level tokens if", "# this will always be the quoted value (token,value,line,col) =", "# def _python_code(self, name): (token,value,line,col) = self.tokens.Next() values = [s", "elif 'start_python_code' == token: expression.code = self._python_code(nonterminal.name) elif 'lbracket' ==", "column %d', line, col) return value # # Compile the", "token self.tokens.Next() try: return compile(codeblock, '<'+name+'>', 'exec') except SyntaxError as", "file in each include directory, ignore errors content = None", 
"except: continue # if no file was found, throw an", "= 0 (token,value,line,col) = self.tokens.Next() if 'constant' != token: raise", "s in values) # eat the end_python_code token self.tokens.Next() try:", "at line %d, column %d', line, col) (token,ignore,line,col) = self.tokens.Next()", "token: raise nelly.error('Missing weight at line %d, column %d', line,", "any optional arguments for the non-terminal if 'lparen' == token:", "self._python_code(nonterminal.name) elif 'lbracket' == token: try: expression.Operation(Types.SLICE, self._slice()) except IndexError:", "names are quoted if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:", "== token: self._include() elif 'start_python_code' == token: if r'<%pre' ==", "the regular expressions self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex')) # container for", "container and add it to the program nonterminal = Nonterminal(_type,", "== token: self._comment() else: raise nelly.error('Unhandled %s %s at %d:%d',", "self.tokens: (token,value,line,col) = self.tokens.Next() # handle all the top-level tokens", "'varterminal' == token: expression.Statement(Types.VARTERMINAL, value) elif 'backref' == token: expression.Statement(Types.BACKREFERENCE,", "expression.Statement(Types.NONTERMINAL, value) elif 'varterminal' == token: expression.Statement(Types.VARTERMINAL, value) elif 'backref'", "== token: expression.Statement(Types.BACKREFERENCE, value) elif 'function' == token: functerminal =", "!= token: raise nelly.error('Missing > at %d, column %d', line,", "other BNF files # def _include(self): (token,value,line,col) = self.tokens.Next() #", "Program from .types import * class Parser(object): def __init__(self, include_dirs=[]):", "path = self._quote() # try opening the file in each", "break except: continue # if no file was found, throw", "= Expression((line,col)) elif 'lparen' == token: anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS,", "== token: nonterminal.expressions.append(expression) break elif delimiter == token: nonterminal.expressions.append(expression) expression", "== token: anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen', anonterminal)", "this will always be the quoted value (token,value,line,col) = self.tokens.Next()", "always be the terminal quote self.tokens.Next() return value # #", "no file was found, throw an error if None ==", "'comma' != token: raise nelly.error('Missing comma at line %d, column", "break elif 'comma' == token: continue elif 'option' == token:", "anonterminal) self._expressions('pipe', 'rparen', anonterminal) elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']:", "token: functerminal = Nonterminal(Types.ANONYMOUS) self._expressions('comma', 'rparen', functerminal) expression.Statement(Types.FUNCTION, value[1:], functerminal)", "expression.Statement(Types.TERMINAL, quote) elif token in ['start_single_bytes', 'start_double_bytes', 'start_triple_bytes']: byte_quote =", "section') elif 'start_comment' == token: self._comment() else: raise nelly.error('Unhandled %s", "_weight(self): (token,value,line,col) = self.tokens.Next() if 'constant' != token: raise nelly.error('Missing", "if value.startswith('::'): value = value[2:] self._nonterminal(Types.NONTERMINAL, value) elif 'varterminal' ==", "# check indentation if [s for s in values if", "token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: quote = self._quote() 
expression.Statement(Types.TERMINAL, quote)", "compile(codeblock, '<'+name+'>', 'exec') except SyntaxError as e: raise nelly.error('%d: %s:", "are quoted if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: raise", "inline self.Parse(content) self.pwd.pop() # restore the current tokens self.tokens =", "= value start = True (token,value,line,col) = self.tokens.Next() if 'rbracket'", "token: while True: (token,value,line,col) = self.tokens.Next() if 'rparen' == token:", "%d, column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'constant'", "self.group_stack = [] self.groups = None self.group = None def", "%s %s at %d:%d', token, repr(value), line, col) self.tokens_stack.pop() return", "value: self.program.postscript.append(self._python_code('post')) else: raise nelly.error('Please specify pre or post in", "raise nelly.error('Bad indentation in code block at line %d, column", "codeblock = '\\n'.join(s[len(ws):] for s in values) # eat the", "column %d', line, col) elif 'lcurley' == token: try: expression.Operation(Types.RANGE,", "at line %d, column %d', line, col) elif 'lcurley' ==", "token: upper = value else: raise nelly.error('Missing range at line", "'rparen', anonterminal) elif token in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: quote =", "column %d', line, col) (token,value,line,col) = self.tokens.Next() if 'constant' ==", "values = [s for s in value.split('\\n') if s.strip()] or", "continue # if no file was found, throw an error", "%d', line, col) lower = value upper = value (token,value,line,col)", "__init__(self, include_dirs=[]): self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars') ]", "of the first line ws = re.compile(r'\\s*').match(values[0]).group() # check indentation", "= [] self.groups = None self.group = None def Parse(self,", "nelly.error('Missing } at line %d, column %d', line, col) if", "Python into a code object # def _python_code(self, name): (token,value,line,col)", "<gh_stars>0 # # (c) 2008-2020 <NAME> # import sys import", "parse any optional arguments for the non-terminal if 'lparen' ==", "raise nelly.error('Unhandled token \"%s\" at line %d, column %d', token,", "column %d', line, col) elif 'langle' == token: expression.Weight(self._weight()) elif", "if 'constant' != token: raise nelly.error('Missing weight at line %d,", "weight at line %d, column %d', line, col) (token,ignore,line,col) =", "col) self.tokens_stack.pop() return self.program def _nonterminal(self, _type, name): # create", "functerminal) elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif 'constant' ==", "if not s.startswith(ws)]: raise nelly.error('Bad indentation in code block at", "os import re import logging import nelly from .scanner import", "= self.scanner.Scan(grammar) # keep a reference to the tokens for", "an error if None == content: raise nelly.error('Could not load", "%d, column %d', line, col) # parse zero or more", "create a new container and add it to the program", "col) (token,value,line,col) = self.tokens.Next() if 'constant' == token: upper =", "line, col) elif 'langle' == token: expression.Weight(self._weight()) elif 'empty' ==", "byte_quote = self._quote() expression.Statement(Types.TERMINAL, byte_quote) elif 'nonterminal' == token: expression.Statement(Types.NONTERMINAL,", "= False (token,value,line,col) = self.tokens.Next() if 'constant' == token: front", "file %s', repr(path)) # ignore empty file if not content:", "== 'start': self.program.start.append(name) 
elif 'decorator' == token: nonterminal.decorators.append(value[1:]) else: raise", "expression.Statement(Types.FUNCTION, value[1:], functerminal) elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:]) elif", "self.tokens.Next() return value # # Slice a string # def", "nelly.error('Could not load file %s', repr(path)) # ignore empty file", "== value: self.program.preamble.append(self._python_code('pre')) elif r'<%post' == value: self.program.postscript.append(self._python_code('post')) else: raise", "(token,value,line,col) = self.tokens.Next() # this will always be the terminal", "colon at line %d, column %d', line, col) (token,value,line,col) =", "if value == 'start': self.program.start.append(name) elif 'decorator' == token: nonterminal.decorators.append(value[1:])", "a range # def _range(self): lower = 0 upper =", "self._range()) except IndexError: raise nelly.error('Applying range to nothing at line", "= open(fullpath, 'r') logging.debug('Including file %s', repr(fullpath)) break except: continue", "= self.tokens.Next() if 'rcurley' != token: raise nelly.error('Missing } at", "value = value[2:] self._nonterminal(Types.VARTERMINAL, value) elif 'include' == token: self._include()", "elif 'decorator' == token: nonterminal.decorators.append(value[1:]) else: raise nelly.error('Unknown option: %s", "%d:%d', token, repr(value), line, col) self.tokens_stack.pop() return self.program def _nonterminal(self,", "%d, column %d', line, col) if lower > upper: lower,upper", "raise nelly.error('quoted file path expected') # get the quoted value", "'rparen' == token: break elif 'comma' == token: continue elif", "column %d', line, col) lower = value upper = value", "import nelly from .scanner import Scanner from .program import Program", "expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen', anonterminal) elif token in ['start_single_quote', 'start_double_quote',", ".scanner import Scanner from .program import Program from .types import", "# this will always be the terminal quote self.tokens.Next() return", "'colon' != token: raise nelly.error('Missing colon at line %d, column", "%d', line, col) return value # # Compile the Python", "end_python_code token self.tokens.Next() try: return compile(codeblock, '<'+name+'>', 'exec') except SyntaxError", "for s in values if not s.startswith(ws)]: raise nelly.error('Bad indentation", "if 'rcurley' == token: return (lower,upper) elif 'comma' != token:", "self.tokens.Next() if 'rcurley' != token: raise nelly.error('Missing } at line", "files are parsed self.tokens_stack.append(self.tokens) # iterate over all the tokens", "self.scanner.Scan(grammar) # keep a reference to the tokens for when", "(token,value,line,col) = self.tokens.Next() if 'constant' != token: raise nelly.error('Missing weight", "self.tokens.Next() try: return compile(codeblock, '<'+name+'>', 'exec') except SyntaxError as e:", "%s %s', token, value) (token,value,line,col) = self.tokens.Next() if 'colon' !=", "False == start: raise nelly.error('Empty slice at line %d, column", "[] # setup the scanner based on the regular expressions", "functerminal) expression.Statement(Types.FUNCTION, value[1:], functerminal) elif 'reference' == token: expression.Statement(Types.REFERENCE, value[1:])", "expression.Operation(Types.RANGE, self._range()) except IndexError: raise nelly.error('Applying range to nothing at", "True: (token,value,line,col) = self.tokens.Next() if 'rparen' == token: break elif", "# setup the scanner based on the regular expressions 
self.scanner", "line, col) if lower > upper: lower,upper = upper,lower return", "logging import nelly from .scanner import Scanner from .program import", "== token: if value.startswith('::'): value = value[2:] self._nonterminal(Types.NONTERMINAL, value) elif", "= self._quote() expression.Statement(Types.TERMINAL, byte_quote) elif 'nonterminal' == token: expression.Statement(Types.NONTERMINAL, value)", "functerminal = Nonterminal(Types.ANONYMOUS) self._expressions('comma', 'rparen', functerminal) expression.Statement(Types.FUNCTION, value[1:], functerminal) elif", "value upper = value (token,value,line,col) = self.tokens.Next() if 'rcurley' ==", "colon at line %d, column %d', line, col) # parse", "the tokens while True: (token,value,line,col) = self.tokens.Next() if 'start_comment' ==", "Repeat a range # def _range(self): lower = 0 upper", "os.path.join(nelly.root, 'grammars') ] self.pwd = [] # setup the scanner", "self.scanner = Scanner(os.path.join(nelly.root, 'rules.lex')) # container for the compiled program", "token: nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen' == token: anonterminal", "else: raise nelly.error('Unhandled %s %s at %d:%d', token, repr(value), line,", "'function' == token: functerminal = Nonterminal(Types.ANONYMOUS) self._expressions('comma', 'rparen', functerminal) expression.Statement(Types.FUNCTION,", "code object # def _python_code(self, name): (token,value,line,col) = self.tokens.Next() values", "import sys import os import re import logging import nelly", "# try opening the file in each include directory, ignore", "raise nelly.error('Missing > at %d, column %d', line, col) return", "nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen' == token: anonterminal =", "front = value start = True (token,value,line,col) = self.tokens.Next() if", "* class Parser(object): def __init__(self, include_dirs=[]): self.include_dirs = include_dirs +", "def __init__(self, include_dirs=[]): self.include_dirs = include_dirs + [ os.path.join(nelly.root, 'grammars')", "program nonterminal = Nonterminal(_type, name) self.program.nonterminals[name] = nonterminal (token,value,line,col) =", "'lparen' == token: anonterminal = Nonterminal(Types.ANONYMOUS) expression.Statement(Types.ANONYMOUS, anonterminal) self._expressions('pipe', 'rparen',", "code codeblock = '\\n'.join(s[len(ws):] for s in values) # eat", "delimiter == token: nonterminal.expressions.append(expression) expression = Expression((line,col)) elif 'lparen' ==", "quoted if token not in ['start_single_quote', 'start_double_quote', 'start_triple_quote']: raise nelly.error('quoted", "self.pwd.append(os.path.dirname(grammarFile.name)) logging.debug('Parsing %s (%d bytes)', grammarFile.name, len(grammar)) self.tokens = self.scanner.Scan(grammar)", ".program import Program from .types import * class Parser(object): def", "'\\n'.join(s[len(ws):] for s in values) # eat the end_python_code token", "delimiter, sentinel, nonterminal): (token,value,line,col) = self.tokens.Peek() expression = Expression((line,col)) while", "'r') logging.debug('Including file %s', repr(fullpath)) break except: continue # if", "at line %d, column %d', line, col) (token,value,line,col) = self.tokens.Next()", "token, line, col) def _quote(self): # this will always be", "%d, column %d', line, col) return (front,back) # # Repeat", "start: raise nelly.error('Empty slice at line %d, column %d', line,", "back = value (token,value,line,col) = self.tokens.Next() elif 'rbracket' != 
# nelly grammar parser, reassembled from the shuffled fragments above. The
# class name Parser and the module-level imports are assumptions (marked
# below); an ellipsis (...) marks spots the fragments do not recover.
import os
import re
import logging

import nelly
from nelly.types import Types                            # assumed module layout
from nelly.program import Program                        # assumed module layout
from nelly.nonterminal import Nonterminal, Expression    # assumed module layout


class Parser(object):

    def __init__(self):
        self.program      = Program()
        self.tokens_stack = []
        self.groups_stack = []
        self.group_stack  = []
        self.groups       = None
        self.group        = None
        self.pwd          = []
        self.tokens       = None

    def Parse(self, grammarFile):
        grammar = grammarFile.read()
        self.pwd.append(os.path.dirname(grammarFile.name))
        logging.debug('Parsing %s', grammarFile.name)

        ...  # tokenize `grammar` into self.tokens (lexer setup not recoverable)

        # keep a reference to the tokens for when included files are parsed
        self.tokens_stack.append(self.tokens)

        # iterate over all the tokens
        while self.tokens:
            (token, value, line, col) = self.tokens.Next()
            if 'nonterminal' == token:
                self._nonterminal(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                value = value[2:]
                self._nonterminal(Types.VARTERMINAL, value)
            elif 'include' == token:
                self._include()
            elif 'start_python_code' == token:
                code = self._python_code('<global>')
                if 'pre' == value:
                    self.program.pre.append(code)
                elif 'post' == value:
                    self.program.post.append(code)
                else:
                    raise nelly.error('Please specify pre or post in code section')
            elif 'start_comment' == token:
                self._comment()
            else:
                raise nelly.error('Unknown token %s at line %d, column %d',
                                  token, line, col)

        self.tokens_stack.pop()
        return self.program

    def _nonterminal(self, _type, name):
        # create a new nonterminal container and register it by name
        nonterminal = Nonterminal(_type, name)
        self.program.nonterminals[name] = nonterminal

        (token, value, line, col) = self.tokens.Next()

        # parse any optional arguments for the non-terminal
        if 'lparen' == token:
            while True:
                (token, value, line, col) = self.tokens.Next()
                if 'rparen' == token:
                    break
                elif 'comma' == token:
                    continue
                elif 'option' == token:
                    nonterminal.options.append(value)
                    if value == 'start':
                        self.program.start.append(name)
                elif 'decorator' == token:
                    nonterminal.decorators.append(value[1:])
                else:
                    raise nelly.error('Unknown option: %s %s', token, value)
            (token, value, line, col) = self.tokens.Next()

        if 'colon' != token:
            raise nelly.error('Parse error, missing colon at line %d, column %d',
                              line, col)

        # parse one or more expressions until a semicolon is found
        self._expressions('pipe', 'semicolon', nonterminal)

    def _expressions(self, pipe, sentinel, nonterminal):
        expression = Expression()
        while self.tokens:
            (token, value, line, col) = self.tokens.Next()
            if sentinel == token:
                nonterminal.expressions.append(expression)
                break
            elif pipe == token:
                nonterminal.expressions.append(expression)
                expression = Expression()
            elif token in ['start_single_quote', 'start_double_quote',
                           'start_triple_quote']:
                quote = self._quote()
                expression.Statement(Types.TERMINAL, quote)
            elif token in ['start_single_bytes', 'start_double_bytes',
                           'start_triple_bytes']:
                byte_quote = self._quote()
                expression.Statement(Types.TERMINAL, byte_quote)
            elif 'nonterminal' == token:
                expression.Statement(Types.NONTERMINAL, value)
            elif 'varterminal' == token:
                expression.Statement(Types.VARTERMINAL, value)
            elif 'backref' == token:
                expression.Statement(Types.BACKREFERENCE, value)
            elif 'reference' == token:
                expression.Statement(Types.REFERENCE, value[1:])
            elif 'constant' == token:
                expression.Statement(Types.TERMINAL, value)
            elif 'function' == token:
                functerminal = Nonterminal(Types.ANONYMOUS)
                self._expressions('comma', 'rparen', functerminal)
                ...  # attach the anonymous nonterminal as a function call
            elif 'start_python_code' == token:
                expression.code = self._python_code(nonterminal.name)
            elif 'lbracket' == token:
                try:
                    expression.Operation(Types.SLICE, self._slice())
                except IndexError:
                    raise nelly.error('Applying slice to nothing at line %d, column %d',
                                      line, col)
            elif 'lcurley' == token:
                try:
                    expression.Operation(Types.RANGE, self._range())
                except IndexError:
                    raise nelly.error('Applying range to nothing at line %d, column %d',
                                      line, col)
            elif 'langle' == token:
                expression.Weight(self._weight())
            elif 'empty' == token:
                pass
            else:
                raise nelly.error('Unknown token %s at line %d, column %d',
                                  token, line, col)

    def _quote(self):
        (token, value, line, col) = self.tokens.Next()
        # this will always be the terminal quote
        self.tokens.Next()
        return value

    #
    # Multi-line comments
    #
    def _comment(self):
        # consume and disregard everything inside the comment block
        while self.tokens:
            (token, value, line, col) = self.tokens.Next()
            if 'end_comment' == token:
                break

    #
    # Compile the Python into a code object
    #
    def _python_code(self, name):
        (token, value, line, col) = self.tokens.Next()

        values = [s for s in value.split('\n') if s.strip()] or ['']
        # save the whitespace of the first line
        ws = re.compile(r'\s*').match(values[0]).group()
        # check indentation
        if [s for s in values if not s.startswith(ws)]:
            raise nelly.error('Bad indentation in code block at line %d', line)
        # rejoin the code
        codeblock = '\n'.join(s[len(ws):] for s in values)

        try:
            return compile(codeblock, name, 'exec')
        except SyntaxError as e:
            raise nelly.error('%d: %s: %s', e.lineno, e.msg, repr(e.text))

    #
    # Include other BNF files
    #
    def _include(self):
        (token, value, line, col) = self.tokens.Next()
        if token not in ['start_single_quote', 'start_double_quote',
                         'start_triple_quote']:
            raise nelly.error('Include path expected')
        # get the quoted value
        path = self._quote()

        # try opening the file in each include directory, ignore errors
        content = None
        for include_dir in self.pwd[-1:] + ['.']:
            try:
                content = open(os.path.join(include_dir, path)).read()
                break
            except IOError:
                continue

        # if no file was found, throw an error
        if None == content:
            raise nelly.error('Could not include "%s"', path)

        ...  # lex `content` and push it onto self.tokens_stack

    #
    # Slice a generated string
    #
    def _slice(self):
        front = None
        back  = None
        start = False

        (token, value, line, col) = self.tokens.Next()
        if 'constant' == token:
            front = value
            start = True
            (token, value, line, col) = self.tokens.Next()
        if 'rbracket' == token:
            if False == start:
                raise nelly.error('Empty slice at line %d, column %d', line, col)
            return (front, back)
        ...  # the remainder of the slice grammar is not recoverable here

    def _range(self):
        lower = 0
        upper = 0

        (token, value, line, col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing range at line %d, column %d', line, col)
        lower = value
        upper = value

        (token, value, line, col) = self.tokens.Next()
        if 'rcurley' == token:
            return (lower, upper)
        elif 'comma' != token:
            raise nelly.error('Missing comma at line %d, column %d', line, col)

        (token, value, line, col) = self.tokens.Next()
        if 'constant' == token:
            upper = value
        else:
            raise nelly.error('Missing range at line %d, column %d', line, col)

        (token, value, line, col) = self.tokens.Next()
        if 'rcurley' != token:
            raise nelly.error('Missing } at line %d, column %d', line, col)

        return (lower, upper)

    def _weight(self):
        (token, value, line, col) = self.tokens.Next()
        if 'constant' != token:
            raise nelly.error('Missing weight at line %d, column %d', line, col)
        (token, _, line, col) = self.tokens.Next()
        if 'rangle' != token:
            raise nelly.error('Missing > at %d, column %d', line, col)
        return value
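# A minimal usage sketch for the parser above. The class name Parser follows
# the reconstruction (itself an assumption), and 'grammar.bnf' is a
# hypothetical grammar file:
#
#   import logging
#   logging.basicConfig(level=logging.DEBUG)
#
#   parser = Parser()
#   with open('grammar.bnf') as grammarFile:
#       program = parser.Parse(grammarFile)
#   # nonterminals flagged with the 'start' option collect in program.start
#   print(program.start)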
# qcodes/utils/installation_info.py
"""
This module contains helper functions that provide information about how
QCoDeS is installed and about what other packages are installed along with
QCoDeS
"""
import sys
from typing import Dict, List, Optional
import subprocess
import json
import logging

import requirements

if sys.version_info >= (3, 8):
    from importlib.metadata import distribution, version, PackageNotFoundError
else:
    # 3.7 and earlier
    from importlib_metadata import distribution, version, PackageNotFoundError

import qcodes

log = logging.getLogger(__name__)


def is_qcodes_installed_editably() -> Optional[bool]:
    """
    Try to ask pip whether QCoDeS is installed in editable mode and return
    the answer as a boolean. Returns None if pip somehow did not respond as
    expected.
    """
    answer: Optional[bool]

    try:
        pipproc = subprocess.run(['python', '-m', 'pip', 'list', '-e',
                                  '--no-index', '--format=json'],
                                 check=True,
                                 stdout=subprocess.PIPE)
        e_pkgs = json.loads(pipproc.stdout.decode('utf-8'))
        answer = any([d["name"] == 'qcodes' for d in e_pkgs])
    except Exception as e:  # we actually do want a catch-all here
        log.warning(f'{type(e)}: {str(e)}')
        answer = None

    return answer


def get_qcodes_version() -> str:
    """
    Get the version of the currently installed QCoDeS
    """
    return qcodes.version.__version__


def get_qcodes_requirements() -> List[str]:
    """
    Return a list of the names of the packages that QCoDeS requires
    """
    qc_pkg = distribution('qcodes').requires
    if qc_pkg is None:
        return []
    package_names = [list(requirements.parse(req))[0].name for req in qc_pkg]

    return package_names


def get_qcodes_requirements_versions() -> Dict[str, str]:
    """
    Return a dictionary of the currently installed versions of the packages
    that QCoDeS requires. The dict maps package name to version string.
    If an (optional) dependency is not installed the name maps to
    "Not installed".
    """
    req_names = get_qcodes_requirements()
    req_versions = {}

    for req in req_names:
        try:
            req_versions[req] = version(req)
        except PackageNotFoundError:
            req_versions[req] = "Not installed"

    return req_versions
# documents/views.py (from brandonrobertz/foia-pdf-processing-system)
from django.shortcuts import render
from django.http import JsonResponse

from .models import FieldCategory


def fieldname_values(request):
    if request.method == "GET":
        fieldname = request.GET['fieldname']
        query = request.GET.get('q')
        q_kwargs = dict(
            fieldname=fieldname,
        )
        if query:
            q_kwargs['value__icontains'] = query
        fc = FieldCategory.objects.filter(
            **q_kwargs
        ).order_by("-count").values('value')
        return JsonResponse(list(fc), safe=False)
    elif request.method == "POST":
        fieldname = request.POST['fieldname']
        value = request.POST['value']
        fc, created = FieldCategory.objects.get_or_create(
            fieldname=fieldname, value=value
        )
        return JsonResponse({'status': 'ok'})


def fieldname_value_count(request):
    # just let it explode if people don't POST properly
    fieldname = request.POST['fieldname']
    value = request.POST['value']
    fc = FieldCategory.objects.get(
        fieldname=fieldname, value=value
    )
    fc.count += 1
    fc.save()
    return JsonResponse({'status': 'ok'})
# tests/test_provider_Mongey_kafka-connect.py
# Automatically generated by tools/makecode.py (24-Sep-2021 15:20:11 UTC)


def test_provider_import():
    import terrascript.provider.Mongey.kafka_connect


def test_resource_import():
    from terrascript.resource.Mongey.kafka_connect import kafka_connect_connector


# TODO: Shortcut imports without namespace for official and supported providers.
# TODO: This has to be moved into a required_providers block.
#
# def test_version_source():
#
#     import terrascript.provider.Mongey.kafka_connect
#
#     t = terrascript.provider.Mongey.kafka_connect.kafka_connect()
#     s = str(t)
#
#     assert 'https://github.com/Mongey/terraform-provider-kafka-connect' in s
#     assert '0.2.3' in s
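# Sketch of how the imported classes are typically combined with terrascript;
# the resource label and config are illustrative, and the str() rendering
# mirrors the commented-out test above:
#
#   import terrascript
#   from terrascript.resource.Mongey.kafka_connect import kafka_connect_connector
#
#   ts = terrascript.Terrascript()
#   ts += kafka_connect_connector(
#       'example',
#       name='example-connector',
#       config={'connector.class': 'FileStreamSource'},  # hypothetical config
#   )
#   print(str(ts))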
[ "\"width: 300px; height: 300px; border-radius: 150px;' \\ '-webkit-border-radius: 150px;-moz-border-radius: 150px;\">'", "asc from sqlalchemy.orm import sessionmaker from models import Base, Category,", "token in session user = session.query(User).filter_by(email=login_session['email']).first() token = user.generate_auth_token(600) login_session['token']", "from flask import session as login_session from sqlalchemy import create_engine,", "i in items]) @app.route('/catalog/<category>/<item>/JSON') def categoryItemJSON(category, item): itemCategory = session.query(Category).filter_by(name=category).first()", "and create database session engine = create_engine('sqlite:///catalog.db') Base.metadata.bind = engine", "# Get user info userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo' params = {'access_token':", "# Show an item in a category @app.route('/catalog/<category>/<item>/') def showCategoryItem(category,", "API to view entire catalog Information.\"\"\" return render_template('login.html') # Third", "Show Items in a category item @app.route('/catalog/<category>/') def showCatalogCategory(category): itemCategory", "Third Party Oauth callback @app.route('/oauth/<provider>', methods=['POST']) def oauthLogin(provider): \"\"\" Retrieves", "make_response from flask import session as login_session from sqlalchemy import", "flash, make_response from flask import session as login_session from sqlalchemy", "# Show Items in a category item @app.route('/catalog/<category>/') def showCatalogCategory(category):", "access_token login_session['gplus_id'] = credentials.id_token['sub'] # Get user info userinfo_url =", "= 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\ 'name,id,email,picture'.format(access_token) # noqa h = httplib2.Http() result", "if getUserID(login_session['email']) is not None: login_session['user_id'] = getUserID(login_session['email']) else: createUser(login_session)", "handler @app.route('/login') def showLogin(): \"\"\"JSON API to view entire catalog", "in login_session: return render_template('publiccatalog.html', categories=categories, items=items) else: return render_template('catalog.html', categories=categories,", "Items in a category item @app.route('/catalog/<category>/') def showCatalogCategory(category): itemCategory =", "are not authorized to delete {}.'.format(item)) return redirect(url_for('showCategoryItem', category=categoryItem.name, item=itemToDelete.name))", "categories = session.query(Category).all() if 'token' not in login_session: return render_template('publiccategoryitem.html',", "if login_session['provider'] == 'facebook': del login_session['facebook_id'] del login_session['access_token'] del login_session['username']", "= data['picture']['data']['url'] login_session['facebook_id'] = data['id'] # Checks if user exists", "else: return render_template('category.html', items=items, category=itemCategory, categories=categories) # Show an item", "methods=['GET', 'POST']) def deleteCategoryItem(category, item): if 'token' not in login_session:", "'POST']) def editCategoryItem(category, item): if 'token' not in login_session: return", "categories=categories) # Show an item in a category @app.route('/catalog/<category>/<item>/') def", "from oauth2client.client import flow_from_clientsecrets from oauth2client.client import FlowExchangeError import httplib2", "and secrets from the json files CLIENT_ID = json.loads(open('client_secrets.json', 'r')", "engine = create_engine('sqlite:///catalog.db') Base.metadata.bind = engine DBSession = 
sessionmaker(bind=engine) session", "'fb_exchange_token&client_id={}&client_secret={}&' \\ 'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token) # noqa h = httplib2.Http()", "category item @app.route('/catalog/<category>/<item>/delete', methods=['GET', 'POST']) def deleteCategoryItem(category, item): if 'token'", "session.query(Item).filter_by(name=item, category_id=itemCategory.id).first() return jsonify(Category=[itemCategory.serialize], Item=[item.serialize]) # Show all Categories and", "i in items]) @app.route('/catalog/<category>/JSON') def catalogCategoryJSON(category): itemCategory = session.query(Category).filter_by(name=category).first() items", "'google': code = request.data try: # Upgrade auth code into", "Edited') return redirect(url_for('showCategoryItem', category=request.form['category'], item=editedItem.name)) else: return render_template('editcategoryitem.html', category=categoryItem.name, item=editedItem.name,", "= session.query(Category).all() if 'token' not in login_session: return render_template('publiccategoryitem.html', item=item,", "output += '<h1>Welcome, {}!</h1>'.format(login_session['username']) output += '<img src=\"{}\" '.format(login_session['picture']) output", "error handling if result.get('error') is not None: response = make_response(json.dumps(result.get('error')),", "authorized to delete {}.'.format(item)) return redirect(url_for('showCategoryItem', category=categoryItem.name, item=itemToDelete.name)) if request.method", "import requests app = Flask(__name__) # Retrieves client ID's and", "methods=['GET', 'POST']) def editCategoryItem(category, item): if 'token' not in login_session:", "credentials object oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='') oauth_flow.redirect_uri = 'postmessage' credentials", "login_session['access_token'] del login_session['username'] del login_session['picture'] del login_session['email'] del login_session['token'] flash(\"You", "user info userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo' params = {'access_token': login_session['access_token'], 'alt':", "items]) @app.route('/catalog/<category>/<item>/JSON') def categoryItemJSON(category, item): itemCategory = session.query(Category).filter_by(name=category).first() item =", "item = session.query(Item).filter_by(name=item, category_id=category.id).first() categories = session.query(Category).all() if 'token' not", "from the json files CLIENT_ID = json.loads(open('client_secrets.json', 'r') .read())['web']['client_id'] APP_ID", "to view entire catalog Information.\"\"\" return render_template('login.html') # Third Party", "valid access token access_token = credentials.access_token url = 'https://www.googleapis.com/oauth2/v1/tokeninfo?' 
\\", "session user = session.query(User).filter_by(email=login_session['email']).first() token = user.generate_auth_token(600) login_session['token'] = token", "in session user = session.query(User).filter_by(email=login_session['email']).first() token = user.generate_auth_token(600) login_session['token'] =", "FlowExchangeError import httplib2 import json import requests app = Flask(__name__)", "provider == 'google': code = request.data try: # Upgrade auth", "code into credentials object oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='') oauth_flow.redirect_uri =", "scope='') oauth_flow.redirect_uri = 'postmessage' credentials = oauth_flow.step2_exchange(code) except FlowExchangeError: response", "object oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='') oauth_flow.redirect_uri = 'postmessage' credentials =", "Retrieves client ID's and secrets from the json files CLIENT_ID", "item): if 'token' not in login_session: return redirect('/login') user =", "are not authorized to edit {}.'.format(item)) return redirect(url_for('showCategoryItem', category=categoryItem.name, item=editedItem.name))", "is not None: response = make_response(json.dumps(result.get('error')), 500) response.headers['Content-Type'] = '", "login_session['gplus_id'] = credentials.id_token['sub'] # Get user info userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo'", "else: flash(\"No user has been logged in.\") return redirect(url_for('showCatalog')) #", "= make_response(json.dumps(result.get('error')), 500) response.headers['Content-Type'] = ' application/json' return response #", "in items]) @app.route('/catalog/<category>/JSON') def catalogCategoryJSON(category): itemCategory = session.query(Category).filter_by(name=category).first() items =", "request, redirect, jsonify, g from flask import url_for, flash, make_response", "has been logged in.\") return redirect(url_for('showCatalog')) # JSON APIs to", "access_token) # noqa h = httplib2.Http() result = json.loads(h.request(url, 'GET')[1])", "height: 300px; border-radius: 150px;' \\ '-webkit-border-radius: 150px;-moz-border-radius: 150px;\">' flash('Now logged", "c in categories], Items=[i.serialize for i in items]) @app.route('/catalog/<category>/JSON') def", "data = result login_session['access_token'] = access_token login_session['provider'] = 'facebook' login_session['username']", "'access_token={}'.format(access_token) h = httplib2.Http() result = json.loads(h.request(url, 'GET')[1]) # Access", "# Strip expire tag from access token access_token = result['access_token']", "{}!</h1>'.format(login_session['username']) output += '<img src=\"{}\" '.format(login_session['picture']) output += 'style =", "render_template('editcategoryitem.html', category=categoryItem.name, item=editedItem.name, categories=categories, editedItem=editedItem) # Delete a category item", "Retrieves provider to process oauth login. 
params:(string) oauth provider \"\"\"", "+= '<h1>Welcome, {}!</h1>'.format(login_session['username']) output += '<img src=\"{}\" '.format(login_session['picture']) output +=", "session.query(Category).all() items = session.query(Item).order_by(Item.category_id).limit(3) if 'token' not in login_session: return", "else: return render_template('deletecategoryitem.html', category=categoryItem.name, item=itemToDelete.name) if __name__ == '__main__': app.secret_key", "url = 'https://graph.facebook.com/oauth/access_token?grant_type=' \\ 'fb_exchange_token&client_id={}&client_secret={}&' \\ 'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token) #", "category=category, categories=categories) # Create a new item @app.route('/catalog/category/new/', methods=['GET', 'POST'])", "user_id=user.id) session.add(newItem) session.commit() flash('New Item {} Successfully Added'.format(newItem.name)) return redirect(url_for('showCatalog'))", "= data['name'] login_session['email'] = data['email'] login_session['picture'] = data['picture']['data']['url'] login_session['facebook_id'] =", "APP_SECRET = json.loads(open('fb_client_secrets.json', 'r') .read())['web']['app_secret'] # Connect to Database and", "= 'postmessage' credentials = oauth_flow.step2_exchange(code) except FlowExchangeError: response = make_response(", "# Third Party Oauth callback @app.route('/oauth/<provider>', methods=['POST']) def oauthLogin(provider): \"\"\"", "= result['access_token'] url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\ 'name,id,email,picture'.format(access_token) # noqa h", "category=categoryItem.name, item=itemToDelete.name) if __name__ == '__main__': app.secret_key = 'N10kuN!' app.debug", "json.loads(open('fb_client_secrets.json', 'r') .read())['web']['app_secret'] # Connect to Database and create database", "Connect to Database and create database session engine = create_engine('sqlite:///catalog.db')", "'https://www.googleapis.com/oauth2/v1/tokeninfo?' 
\\ 'access_token={}'.format(access_token) h = httplib2.Http() result = json.loads(h.request(url, 'GET')[1])", "'style = \"width: 300px; height: 300px; border-radius: 150px;' \\ '-webkit-border-radius:", "if request.method == 'POST': if request.form['name']: editedItem.name = request.form['name'] if", "item @app.route('/catalog/category/new/', methods=['GET', 'POST']) def newCategoryItem(): if 'token' not in", "login_session: return render_template('publiccategoryitem.html', item=item, category=category, categories=categories) return render_template('categoryitem.html', item=item, category=category,", "url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\ 'name,id,email,picture'.format(access_token) # noqa h = httplib2.Http()", "return render_template('categoryitem.html', item=item, category=category, categories=categories) # Create a new item", "Item=[item.serialize]) # Show all Categories and the latest items @app.route('/')", "= session.query(User).filter_by(email=login_session['email']).first() categoryItem = session.query(Category).filter_by(name=category).first() editedItem = session.query(Item).filter_by( name=item, category_id=categoryItem.id).first()", "= session.query(Category).all() items = session.query(Item).order_by(Item.category_id).limit(3) return jsonify(Categories=[c.serialize for c in", "'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token) # noqa h = httplib2.Http() result =", "if 'token' not in login_session: return redirect('/login') user = session.query(User).filter_by(email=login_session['email']).first()", "None: response = make_response(json.dumps(result.get('error')), 500) response.headers['Content-Type'] = ' application/json' return", "app = Flask(__name__) # Retrieves client ID's and secrets from", "Base.metadata.bind = engine DBSession = sessionmaker(bind=engine) session = DBSession() #", "info userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo' params = {'access_token': login_session['access_token'], 'alt': 'json'}", "else: return render_template('editcategoryitem.html', category=categoryItem.name, item=editedItem.name, categories=categories, editedItem=editedItem) # Delete a", "Successfully Deleted') return redirect(url_for('showCatalog')) else: return render_template('deletecategoryitem.html', category=categoryItem.name, item=itemToDelete.name) if", "item = session.query(Item).filter_by(name=item, category_id=itemCategory.id).first() return jsonify(Category=[itemCategory.serialize], Item=[item.serialize]) # Show all", "items=items) # Show Items in a category item @app.route('/catalog/<category>/') def", "user info data = result login_session['access_token'] = access_token login_session['provider'] =", "redirect(url_for('showCatalog')) else: flash(\"No user has been logged in.\") return redirect(url_for('showCatalog'))", "@app.route('/catalog/<category>/<item>/') def showCategoryItem(category, item): category = session.query(Category).filter_by(name=category).first() item = session.query(Item).filter_by(name=item,", "def editCategoryItem(category, item): if 'token' not in login_session: return redirect('/login')", "response = make_response( json.dumps('Failed to upgrade the authorization code.'), 401)", "editedItem.name = request.form['name'] if request.form['description']: editedItem.description = request.form['description'] if request.form['category']:", "Edit a category item @app.route('/catalog/<category>/<item>/edit', methods=['GET', 'POST']) def editCategoryItem(category, item):", "= category.id 
session.add(editedItem) session.commit() flash('Item Successfully Edited') return redirect(url_for('showCategoryItem', category=request.form['category'],", "the latest items @app.route('/') @app.route('/catalog') def showCatalog(): categories = session.query(Category).all()", "redirect('/login') user = session.query(User).filter_by(email=login_session['email']).first() categoryItem = session.query(Category).filter_by(name=category).first() editedItem = session.query(Item).filter_by(", "user = session.query(User).filter_by(email=email).one() return user.id except: return None # Revoke", "return render_template('publiccatalog.html', categories=categories, items=items) else: return render_template('catalog.html', categories=categories, items=items) #", "import flow_from_clientsecrets from oauth2client.client import FlowExchangeError import httplib2 import json", "session.query(User).filter_by(email=login_session['email']).first() token = user.generate_auth_token(600) login_session['token'] = token output = ''", "'<img src=\"{}\" '.format(login_session['picture']) output += 'style = \"width: 300px; height:", "if provider == 'google': code = request.data try: # Upgrade", "category_id=category.id, user_id=user.id) session.add(newItem) session.commit() flash('New Item {} Successfully Added'.format(newItem.name)) return", "if user.id != editedItem.user_id: flash('You are not authorized to edit", "= session.query(Category).filter_by( name=request.form['category']).first() editedItem.category_id = category.id session.add(editedItem) session.commit() flash('Item Successfully", "not authorized to delete {}.'.format(item)) return redirect(url_for('showCategoryItem', category=categoryItem.name, item=itemToDelete.name)) if", "in login_session: if login_session['provider'] == 'google': del login_session['gplus_id'] if login_session['provider']", "= session.query(Item).filter_by(category_id=itemCategory.id).all() return jsonify(Categories=[itemCategory.serialize], Items=[i.serialize for i in items]) @app.route('/catalog/<category>/<item>/JSON')", "flask import Flask, render_template, request, redirect, jsonify, g from flask", "logged out.\") return redirect(url_for('showCatalog')) else: flash(\"No user has been logged", "categories=categories, items=items) else: return render_template('catalog.html', categories=categories, items=items) # Show Items", "Create a new item @app.route('/catalog/category/new/', methods=['GET', 'POST']) def newCategoryItem(): if", "= token output = '' output += '<h1>Welcome, {}!</h1>'.format(login_session['username']) output", "picture=login_session['picture']) session.add(newUser) session.commit() def getUserID(email): try: user = session.query(User).filter_by(email=email).one() return", "def logout(): if 'provider' in login_session: if login_session['provider'] == 'google':", "noqa h = httplib2.Http() result = json.loads(h.request(url, 'GET')[1]) # Get", "@app.route('/oauth/<provider>', methods=['POST']) def oauthLogin(provider): \"\"\" Retrieves provider to process oauth", "Categories and the latest items @app.route('/') @app.route('/catalog') def showCatalog(): categories", "del login_session['access_token'] del login_session['username'] del login_session['picture'] del login_session['email'] del login_session['token']", "logged in as {}'.format(login_session['username'])) return output def createUser(login_session): newUser =", "'application/json' return response # Check for valid access token access_token", "render_template('categoryitem.html', item=item, category=category, 
# Login handler
@app.route('/login')
def showLogin():
    """Render the login page."""
    return render_template('login.html')


# Third Party Oauth callback
@app.route('/oauth/<provider>', methods=['POST'])
def oauthLogin(provider):
    """ Retrieves provider to process oauth login.
        params:(string) oauth provider
    """
    if provider == 'google':
        code = request.data
        try:
            # Upgrade auth code into credentials object
            oauth_flow = flow_from_clientsecrets('client_secrets.json',
                                                 scope='')
            oauth_flow.redirect_uri = 'postmessage'
            credentials = oauth_flow.step2_exchange(code)
        except FlowExchangeError:
            response = make_response(
                json.dumps('Failed to upgrade the authorization code.'), 401)
            response.headers['Content-Type'] = 'application/json'
            return response
        # Check for valid access token
        access_token = credentials.access_token
        url = 'https://www.googleapis.com/oauth2/v1/tokeninfo?' \
              'access_token={}'.format(access_token)
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])
        # Access token error handling
        if result.get('error') is not None:
            response = make_response(json.dumps(result.get('error')), 500)
            response.headers['Content-Type'] = 'application/json'
            return response
        # Store access token in session
        login_session['provider'] = 'google'
        login_session['access_token'] = access_token
        login_session['gplus_id'] = credentials.id_token['sub']
        # Get user info
        userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
        params = {'access_token': login_session['access_token'],
                  'alt': 'json'}
        answer = requests.get(userinfo_url, params=params)
        data = json.loads(answer.text)
        login_session['username'] = data['name']
        login_session['picture'] = data['picture']
        login_session['email'] = data['email']
    elif provider == 'facebook':
        access_token = request.data
        url = 'https://graph.facebook.com/oauth/access_token?grant_type=' \
              'fb_exchange_token&client_id={}&client_secret={}&' \
              'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token)  # noqa
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])
        # Strip expire tag from access token
        access_token = result['access_token']
        url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \
              'name,id,email,picture'.format(access_token)  # noqa
        h = httplib2.Http()
        result = json.loads(h.request(url, 'GET')[1])
        # Get user info
        data = result
        login_session['access_token'] = access_token
        login_session['provider'] = 'facebook'
        login_session['username'] = data['name']
        login_session['email'] = data['email']
        login_session['picture'] = data['picture']['data']['url']
        login_session['facebook_id'] = data['id']
    # Checks if user exists in DB
    if getUserID(login_session['email']) is not None:
        login_session['user_id'] = getUserID(login_session['email'])
    else:
        createUser(login_session)
        login_session['user_id'] = getUserID(login_session['email'])
    # Stores token in session
    user = session.query(User).filter_by(email=login_session['email']).first()
    token = user.generate_auth_token(600)
    login_session['token'] = token
    output = ''
    output += '<h1>Welcome, {}!</h1>'.format(login_session['username'])
    output += '<img src="{}" '.format(login_session['picture'])
    output += 'style = "width: 300px; height: 300px; border-radius: 150px;' \
              '-webkit-border-radius: 150px;-moz-border-radius: 150px;">'
    flash('Now logged in as {}'.format(login_session['username']))
    return output
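# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not in the original): CLIENT_ID is loaded
# at the top of this module but never consulted in the Google branch above.
# The usual hardening step after fetching the tokeninfo result is to confirm
# the token was issued to this application and to the user who signed in,
# e.g. with a helper like this (hypothetical name; `user_id` and `issued_to`
# are the fields Google's v1 tokeninfo endpoint actually returns):
# ---------------------------------------------------------------------------
def _tokeninfo_matches_app(tokeninfo, gplus_id, client_id=None):
    """Return True if a Google tokeninfo response belongs to this
    application and to the user who just signed in."""
    return (tokeninfo.get('user_id') == gplus_id and
            tokeninfo.get('issued_to') == (client_id or CLIENT_ID))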
def createUser(login_session):
    """Add a new User row built from the current login session."""
    newUser = User(username=login_session['username'],
                   email=login_session['email'],
                   picture=login_session['picture'])
    session.add(newUser)
    session.commit()


def getUserID(email):
    """Look up a user id by email; return None if no such user exists."""
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except Exception:  # .one() raises if no (or more than one) row matches
        return None
# Revoke current user's token and reset login_session
@app.route('/logout')
def logout():
    if 'provider' in login_session:
        if login_session['provider'] == 'google':
            del login_session['gplus_id']
        if login_session['provider'] == 'facebook':
            del login_session['facebook_id']
        del login_session['access_token']
        del login_session['username']
        del login_session['picture']
        del login_session['email']
        del login_session['token']
        flash("You have been successfully logged out.")
        return redirect(url_for('showCatalog'))
    else:
        flash("No user has been logged in.")
        return redirect(url_for('showCatalog'))
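# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not in the original): the logout view
# above only clears the Flask session; the "revoke" in its comment would
# also invalidate the token with the provider. A best-effort helper
# (hypothetical name) using the providers' real revocation endpoints:
# ---------------------------------------------------------------------------
def _revoke_provider_token(provider, access_token, facebook_id=None):
    """Ask the OAuth provider to invalidate an access token."""
    h = httplib2.Http()
    if provider == 'google':
        url = ('https://accounts.google.com/o/oauth2/revoke'
               '?token={}'.format(access_token))
        return h.request(url, 'GET')[0]
    if provider == 'facebook':
        url = ('https://graph.facebook.com/{}/permissions'
               '?access_token={}'.format(facebook_id, access_token))
        return h.request(url, 'DELETE')[0]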
# JSON APIs to view Category Information.
@app.route('/catalog/JSON')
def catalogJSON():
    categories = session.query(Category).all()
    items = session.query(Item).order_by(Item.category_id).limit(3)
    return jsonify(Categories=[c.serialize for c in categories],
                   Items=[i.serialize for i in items])


@app.route('/catalog/<category>/JSON')
def catalogCategoryJSON(category):
    itemCategory = session.query(Category).filter_by(name=category).first()
    items = session.query(Item).filter_by(category_id=itemCategory.id).all()
    return jsonify(Categories=[itemCategory.serialize],
                   Items=[i.serialize for i in items])


@app.route('/catalog/<category>/<item>/JSON')
def categoryItemJSON(category, item):
    itemCategory = session.query(Category).filter_by(name=category).first()
    item = session.query(Item).filter_by(name=item,
                                         category_id=itemCategory.id).first()
    return jsonify(Category=[itemCategory.serialize], Item=[item.serialize])
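# ---------------------------------------------------------------------------
# Usage sketch (assumptions: the app is running locally on port 5000, and
# the exact payload shape depends on the `serialize` properties in models.py;
# the helper and the category/item names below are placeholders, not part of
# the original project):
# ---------------------------------------------------------------------------
def _example_fetch_catalog(base_url='http://localhost:5000'):
    """Hit the three JSON feeds exposed above with placeholder names."""
    whole = requests.get(base_url + '/catalog/JSON').json()
    one_cat = requests.get(base_url + '/catalog/SomeCategory/JSON').json()
    one_item = requests.get(
        base_url + '/catalog/SomeCategory/SomeItem/JSON').json()
    return whole, one_cat, one_item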
# Show all Categories and the latest items
@app.route('/')
@app.route('/catalog')
def showCatalog():
    categories = session.query(Category).all()
    items = session.query(Item).order_by(Item.category_id).limit(3)
    if 'token' not in login_session:
        return render_template('publiccatalog.html', categories=categories,
                               items=items)
    else:
        return render_template('catalog.html', categories=categories,
                               items=items)


# Show the items in a category
@app.route('/catalog/<category>/')
def showCatalogCategory(category):
    itemCategory = session.query(Category).filter_by(name=category).first()
    items = session.query(Item).filter_by(category_id=itemCategory.id).all()
    categories = session.query(Category).all()
    if 'token' not in login_session:
        return render_template('publiccategory.html', items=items,
                               category=itemCategory, categories=categories)
    else:
        return render_template('category.html', items=items,
                               category=itemCategory, categories=categories)


# Show an item in a category
@app.route('/catalog/<category>/<item>/')
def showCategoryItem(category, item):
    category = session.query(Category).filter_by(name=category).first()
    item = session.query(Item).filter_by(name=item,
                                         category_id=category.id).first()
    categories = session.query(Category).all()
    if 'token' not in login_session:
        return render_template('publiccategoryitem.html', item=item,
                               category=category, categories=categories)
    return render_template('categoryitem.html', item=item, category=category,
                           categories=categories)
# Create a new item
@app.route('/catalog/category/new/', methods=['GET', 'POST'])
def newCategoryItem():
    if 'token' not in login_session:
        return redirect('/login')
    categories = session.query(Category).all()
    user = session.query(User).filter_by(email=login_session['email']).one()
    if request.method == 'POST':
        category = session.query(Category).filter_by(
            name=request.form['category']).first()
        newItem = Item(name=request.form['name'],
                       description=request.form['description'],
                       category_id=category.id, user_id=user.id)
        session.add(newItem)
        session.commit()
        flash('New Item {} Successfully Added'.format(newItem.name))
        return redirect(url_for('showCatalog'))
    else:
        return render_template('newcategoryitem.html', categories=categories)
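# ---------------------------------------------------------------------------
# Illustrative sketch (assumption, not in the original): in the POST branch
# above, filter_by(...).first() returns None when the submitted category
# name has no matching row, so `category.id` would raise AttributeError.
# A small guard (hypothetical helper) makes that failure explicit:
# ---------------------------------------------------------------------------
from flask import abort


def _category_or_404(name):
    """Return the Category row for `name`, or abort with 404."""
    category = session.query(Category).filter_by(name=name).first()
    if category is None:
        abort(404)
    return category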
# Edit a category item
@app.route('/catalog/<category>/<item>/edit', methods=['GET', 'POST'])
def editCategoryItem(category, item):
    if 'token' not in login_session:
        return redirect('/login')
    user = session.query(User).filter_by(email=login_session['email']).first()
    categoryItem = session.query(Category).filter_by(name=category).first()
    editedItem = session.query(Item).filter_by(
        name=item, category_id=categoryItem.id).first()
    categories = session.query(Category).all()
    if user.id != editedItem.user_id:
        flash('You are not authorized to edit {}.'.format(item))
        return redirect(url_for('showCategoryItem',
                                category=categoryItem.name,
                                item=editedItem.name))
    if request.method == 'POST':
        if request.form['name']:
            editedItem.name = request.form['name']
        if request.form['description']:
            editedItem.description = request.form['description']
        if request.form['category']:
            category = session.query(Category).filter_by(
                name=request.form['category']).first()
            editedItem.category_id = category.id
        session.add(editedItem)
        session.commit()
        flash('Item Successfully Edited')
        return redirect(url_for('showCategoryItem',
                                category=request.form['category'],
                                item=editedItem.name))
    else:
        return render_template('editcategoryitem.html',
                               category=categoryItem.name,
                               item=editedItem.name, categories=categories,
                               editedItem=editedItem)
# Delete a category item
@app.route('/catalog/<category>/<item>/delete', methods=['GET', 'POST'])
def deleteCategoryItem(category, item):
    if 'token' not in login_session:
        return redirect('/login')
    user = session.query(User).filter_by(email=login_session['email']).first()
    categoryItem = session.query(Category).filter_by(name=category).first()
    itemToDelete = session.query(Item).filter_by(
        name=item, category_id=categoryItem.id).first()
    if user.id != itemToDelete.user_id:
        flash('You are not authorized to delete {}.'.format(item))
        return redirect(url_for('showCategoryItem',
                                category=categoryItem.name,
                                item=itemToDelete.name))
    if request.method == 'POST':
        session.delete(itemToDelete)
        session.commit()
        flash('Item Successfully Deleted')
        return redirect(url_for('showCatalog'))
    else:
        return render_template('deletecategoryitem.html',
                               category=categoryItem.name,
                               item=itemToDelete.name)
\\ 'access_token={}'.format(access_token) h = httplib2.Http() result =", "def createUser(login_session): newUser = User(username=login_session['username'], email=login_session['email'], picture=login_session['picture']) session.add(newUser) session.commit() def", "return render_template('deletecategoryitem.html', category=categoryItem.name, item=itemToDelete.name) if __name__ == '__main__': app.secret_key =", "categories=categories) else: return render_template('category.html', items=items, category=itemCategory, categories=categories) # Show an", "User from oauth2client.client import flow_from_clientsecrets from oauth2client.client import FlowExchangeError import", "token error handling if result.get('error') is not None: response =", "None: login_session['user_id'] = getUserID(login_session['email']) else: createUser(login_session) login_session['user_id'] = getUserID(login_session['email']) #", "out.\") return redirect(url_for('showCatalog')) else: flash(\"No user has been logged in.\")", "login_session['token'] = token output = '' output += '<h1>Welcome, {}!</h1>'.format(login_session['username'])", "access_token = result['access_token'] url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\ 'name,id,email,picture'.format(access_token) # noqa", "= session.query(Item).filter_by(name=item, category_id=itemCategory.id).first() return jsonify(Category=[itemCategory.serialize], Item=[item.serialize]) # Show all Categories", "'POST']) def deleteCategoryItem(category, item): if 'token' not in login_session: return", "session.commit() flash('New Item {} Successfully Added'.format(newItem.name)) return redirect(url_for('showCatalog')) else: return", "if login_session['provider'] == 'google': del login_session['gplus_id'] if login_session['provider'] == 'facebook':", "name=item, category_id=categoryItem.id).first() categories = session.query(Category).all() if user.id != editedItem.user_id: flash('You", "items]) @app.route('/catalog/<category>/JSON') def catalogCategoryJSON(category): itemCategory = session.query(Category).filter_by(name=category).first() items = session.query(Item).filter_by(category_id=itemCategory.id).all()", "login_session['email'] = data['email'] elif provider == 'facebook': access_token = request.data", "# Show all Categories and the latest items @app.route('/') @app.route('/catalog')", "login_session['facebook_id'] = data['id'] # Checks if user exists in DB", "to process oauth login. 
params:(string) oauth provider \"\"\" if provider", "'r') .read())['web']['client_id'] APP_ID = json.loads(open('fb_client_secrets.json', 'r') .read())['web']['app_id'] APP_SECRET = json.loads(open('fb_client_secrets.json',", "item): itemCategory = session.query(Category).filter_by(name=category).first() item = session.query(Item).filter_by(name=item, category_id=itemCategory.id).first() return jsonify(Category=[itemCategory.serialize],", "showCatalogCategory(category): itemCategory = session.query(Category).filter_by(name=category).first() items = session.query(Item).filter_by(category_id=itemCategory.id).all() categories = session.query(Category).all()", "from flask import Flask, render_template, request, redirect, jsonify, g from", "import FlowExchangeError import httplib2 import json import requests app =", "data['id'] # Checks if user exists in DB if getUserID(login_session['email'])", "flow_from_clientsecrets from oauth2client.client import FlowExchangeError import httplib2 import json import", "itemCategory = session.query(Category).filter_by(name=category).first() item = session.query(Item).filter_by(name=item, category_id=itemCategory.id).first() return jsonify(Category=[itemCategory.serialize], Item=[item.serialize])", "the json files CLIENT_ID = json.loads(open('client_secrets.json', 'r') .read())['web']['client_id'] APP_ID =", "session.query(Item).filter_by(category_id=itemCategory.id).all() return jsonify(Categories=[itemCategory.serialize], Items=[i.serialize for i in items]) @app.route('/catalog/<category>/<item>/JSON') def", "in a category @app.route('/catalog/<category>/<item>/') def showCategoryItem(category, item): category = session.query(Category).filter_by(name=category).first()", "flash('You are not authorized to edit {}.'.format(item)) return redirect(url_for('showCategoryItem', category=categoryItem.name,", "# Login handler @app.route('/login') def showLogin(): \"\"\"JSON API to view", "try: # Upgrade auth code into credentials object oauth_flow =", "'facebook': del login_session['facebook_id'] del login_session['access_token'] del login_session['username'] del login_session['picture'] del", "user.id != itemToDelete.user_id: flash('You are not authorized to delete {}.'.format(item))", "= create_engine('sqlite:///catalog.db') Base.metadata.bind = engine DBSession = sessionmaker(bind=engine) session =", "login_session['provider'] == 'facebook': del login_session['facebook_id'] del login_session['access_token'] del login_session['username'] del", "= result login_session['access_token'] = access_token login_session['provider'] = 'facebook' login_session['username'] =", "json.loads(h.request(url, 'GET')[1]) # Strip expire tag from access token access_token", "httplib2.Http() result = json.loads(h.request(url, 'GET')[1]) # Strip expire tag from", "Show all Categories and the latest items @app.route('/') @app.route('/catalog') def", "deleteCategoryItem(category, item): if 'token' not in login_session: return redirect('/login') user", "itemToDelete = session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() if user.id != itemToDelete.user_id: flash('You", "render_template('deletecategoryitem.html', category=categoryItem.name, item=itemToDelete.name) if __name__ == '__main__': app.secret_key = 'N10kuN!'", "params:(string) oauth provider \"\"\" if provider == 'google': code =", "item=editedItem.name, categories=categories, editedItem=editedItem) # Delete a category item @app.route('/catalog/<category>/<item>/delete', methods=['GET',", 
"redirect(url_for('showCatalog')) else: return render_template('deletecategoryitem.html', category=categoryItem.name, item=itemToDelete.name) if __name__ == '__main__':", "Item(name=request.form['name'], description=request.form['description'], category_id=category.id, user_id=user.id) session.add(newItem) session.commit() flash('New Item {} Successfully", "= engine DBSession = sessionmaker(bind=engine) session = DBSession() # Login", "login_session['user_id'] = getUserID(login_session['email']) # Stores token in session user =", "def getUserID(email): try: user = session.query(User).filter_by(email=email).one() return user.id except: return", "category=categoryItem.name, item=editedItem.name)) if request.method == 'POST': if request.form['name']: editedItem.name =", "if request.method == 'POST': session.delete(itemToDelete) session.commit() flash('Item Successfully Deleted') return", "for c in categories], Items=[i.serialize for i in items]) @app.route('/catalog/<category>/JSON')", "output += 'style = \"width: 300px; height: 300px; border-radius: 150px;'", "'token' not in login_session: return redirect('/login') user = session.query(User).filter_by(email=login_session['email']).first() categoryItem", "from flask import url_for, flash, make_response from flask import session", "Show an item in a category @app.route('/catalog/<category>/<item>/') def showCategoryItem(category, item):", "categoryItem = session.query(Category).filter_by(name=category).first() itemToDelete = session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() if user.id", "editedItem=editedItem) # Delete a category item @app.route('/catalog/<category>/<item>/delete', methods=['GET', 'POST']) def", "def showCatalogCategory(category): itemCategory = session.query(Category).filter_by(name=category).first() items = session.query(Item).filter_by(category_id=itemCategory.id).all() categories =", "response.headers['Content-Type'] = 'application/json' return response # Check for valid access", "items=items, category=itemCategory, categories=categories) # Show an item in a category", "item=editedItem.name)) if request.method == 'POST': if request.form['name']: editedItem.name = request.form['name']", "login_session['picture'] = data['picture'] login_session['email'] = data['email'] elif provider == 'facebook':", "user.id != editedItem.user_id: flash('You are not authorized to edit {}.'.format(item))", "= 'https://www.googleapis.com/oauth2/v1/userinfo' params = {'access_token': login_session['access_token'], 'alt': 'json'} answer =", "!= editedItem.user_id: flash('You are not authorized to edit {}.'.format(item)) return", "import url_for, flash, make_response from flask import session as login_session", "render_template('publiccatalog.html', categories=categories, items=items) else: return render_template('catalog.html', categories=categories, items=items) # Show", "= session.query(Category).filter_by(name=category).first() items = session.query(Item).filter_by(category_id=itemCategory.id).all() categories = session.query(Category).all() if 'token'", "request.method == 'POST': session.delete(itemToDelete) session.commit() flash('Item Successfully Deleted') return redirect(url_for('showCatalog'))", "create_engine('sqlite:///catalog.db') Base.metadata.bind = engine DBSession = sessionmaker(bind=engine) session = DBSession()", "response # Check for valid access token access_token = credentials.access_token", "# Get user info data = result login_session['access_token'] = access_token", "categories=categories) # 
Create a new item @app.route('/catalog/category/new/', methods=['GET', 'POST']) def", "Added'.format(newItem.name)) return redirect(url_for('showCatalog')) else: return render_template('newcategoryitem.html', categories=categories) # Edit a", "'https://www.googleapis.com/oauth2/v1/userinfo' params = {'access_token': login_session['access_token'], 'alt': 'json'} answer = requests.get(userinfo_url,", "'facebook' login_session['username'] = data['name'] login_session['email'] = data['email'] login_session['picture'] = data['picture']['data']['url']", "= session.query(User).filter_by(email=login_session['email']).one() if request.method == 'POST': category = session.query(Category).filter_by( name=request.form['category']).first()", "a category item @app.route('/catalog/<category>/') def showCatalogCategory(category): itemCategory = session.query(Category).filter_by(name=category).first() items", "= session.query(Category).filter_by(name=category).first() item = session.query(Item).filter_by(name=item, category_id=category.id).first() categories = session.query(Category).all() if", "render_template('publiccategoryitem.html', item=item, category=category, categories=categories) return render_template('categoryitem.html', item=item, category=category, categories=categories) #", "# Store access token in session login_session['provider'] = 'google' login_session['access_token']", "@app.route('/catalog/<category>/JSON') def catalogCategoryJSON(category): itemCategory = session.query(Category).filter_by(name=category).first() items = session.query(Item).filter_by(category_id=itemCategory.id).all() return", "return output def createUser(login_session): newUser = User(username=login_session['username'], email=login_session['email'], picture=login_session['picture']) session.add(newUser)", "session.query(Item).order_by(Item.category_id).limit(3) if 'token' not in login_session: return render_template('publiccatalog.html', categories=categories, items=items)", "login_session: return redirect('/login') user = session.query(User).filter_by(email=login_session['email']).first() categoryItem = session.query(Category).filter_by(name=category).first() itemToDelete", "APIs to view Category Information. 
@app.route('/catalog/JSON') def catalogJSON(): categories =", "in as {}'.format(login_session['username'])) return output def createUser(login_session): newUser = User(username=login_session['username'],", "a category item @app.route('/catalog/<category>/<item>/edit', methods=['GET', 'POST']) def editCategoryItem(category, item): if", "methods=['POST']) def oauthLogin(provider): \"\"\" Retrieves provider to process oauth login.", "User(username=login_session['username'], email=login_session['email'], picture=login_session['picture']) session.add(newUser) session.commit() def getUserID(email): try: user =", "in login_session: return redirect('/login') user = session.query(User).filter_by(email=login_session['email']).first() categoryItem = session.query(Category).filter_by(name=category).first()", "Access token error handling if result.get('error') is not None: response", "editedItem.description = request.form['description'] if request.form['category']: category = session.query(Category).filter_by( name=request.form['category']).first() editedItem.category_id", "return jsonify(Category=[itemCategory.serialize], Item=[item.serialize]) # Show all Categories and the latest", "login_session['access_token'], 'alt': 'json'} answer = requests.get(userinfo_url, params=params) data = json.loads(answer.text)", "token and reset login_session @app.route('/logout') def logout(): if 'provider' in", "flash('Now logged in as {}'.format(login_session['username'])) return output def createUser(login_session): newUser", "Information.\"\"\" return render_template('login.html') # Third Party Oauth callback @app.route('/oauth/<provider>', methods=['POST'])", "in items]) @app.route('/catalog/<category>/<item>/JSON') def categoryItemJSON(category, item): itemCategory = session.query(Category).filter_by(name=category).first() item", "newUser = User(username=login_session['username'], email=login_session['email'], picture=login_session['picture']) session.add(newUser) session.commit() def getUserID(email): try:", "\\ 'fb_exchange_token&client_id={}&client_secret={}&' \\ 'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token) # noqa h =", "sessionmaker from models import Base, Category, Item, User from oauth2client.client", "session.query(Category).filter_by( name=request.form['category']).first() editedItem.category_id = category.id session.add(editedItem) session.commit() flash('Item Successfully Edited')", "Database and create database session engine = create_engine('sqlite:///catalog.db') Base.metadata.bind =", "result = json.loads(h.request(url, 'GET')[1]) # Strip expire tag from access", "categories = session.query(Category).all() items = session.query(Item).order_by(Item.category_id).limit(3) return jsonify(Categories=[c.serialize for c", "Login handler @app.route('/login') def showLogin(): \"\"\"JSON API to view entire", "# Checks if user exists in DB if getUserID(login_session['email']) is", "user exists in DB if getUserID(login_session['email']) is not None: login_session['user_id']", "json import requests app = Flask(__name__) # Retrieves client ID's", "'r') .read())['web']['app_id'] APP_SECRET = json.loads(open('fb_client_secrets.json', 'r') .read())['web']['app_secret'] # Connect to", "oauthLogin(provider): \"\"\" Retrieves provider to process oauth login. 
params:(string) oauth", "login_session['email'] del login_session['token'] flash(\"You have been successfully logged out.\") return", "{} Successfully Added'.format(newItem.name)) return redirect(url_for('showCatalog')) else: return render_template('newcategoryitem.html', categories=categories) #", "from access token access_token = result['access_token'] url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\", "= 'https://graph.facebook.com/oauth/access_token?grant_type=' \\ 'fb_exchange_token&client_id={}&client_secret={}&' \\ 'fb_exchange_token={}'.format(APP_ID, APP_SECRET, access_token) # noqa", "300px; border-radius: 150px;' \\ '-webkit-border-radius: 150px;-moz-border-radius: 150px;\">' flash('Now logged in", "render_template('category.html', items=items, category=itemCategory, categories=categories) # Show an item in a", "= session.query(Category).all() if 'token' not in login_session: return render_template('publiccategory.html', items=items,", "return redirect(url_for('showCatalog')) else: return render_template('newcategoryitem.html', categories=categories) # Edit a category", "Party Oauth callback @app.route('/oauth/<provider>', methods=['POST']) def oauthLogin(provider): \"\"\" Retrieves provider", "flash(\"No user has been logged in.\") return redirect(url_for('showCatalog')) # JSON", "{}'.format(login_session['username'])) return output def createUser(login_session): newUser = User(username=login_session['username'], email=login_session['email'], picture=login_session['picture'])", "\"\"\"JSON API to view entire catalog Information.\"\"\" return render_template('login.html') #", "'google' login_session['access_token'] = access_token login_session['gplus_id'] = credentials.id_token['sub'] # Get user", "user = session.query(User).filter_by(email=login_session['email']).first() categoryItem = session.query(Category).filter_by(name=category).first() itemToDelete = session.query(Item).filter_by( name=item,", "Oauth callback @app.route('/oauth/<provider>', methods=['POST']) def oauthLogin(provider): \"\"\" Retrieves provider to", "and reset login_session @app.route('/logout') def logout(): if 'provider' in login_session:", "in login_session: return render_template('publiccategoryitem.html', item=item, category=category, categories=categories) return render_template('categoryitem.html', item=item,", "= session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() categories = session.query(Category).all() if user.id !=", "JSON APIs to view Category Information. 
@app.route('/catalog/JSON') def catalogJSON(): categories", "return redirect(url_for('showCategoryItem', category=categoryItem.name, item=itemToDelete.name)) if request.method == 'POST': session.delete(itemToDelete) session.commit()", "ID's and secrets from the json files CLIENT_ID = json.loads(open('client_secrets.json',", "login_session['username'] del login_session['picture'] del login_session['email'] del login_session['token'] flash(\"You have been", "category=itemCategory, categories=categories) # Show an item in a category @app.route('/catalog/<category>/<item>/')", "= session.query(User).filter_by(email=email).one() return user.id except: return None # Revoke current", "redirect, jsonify, g from flask import url_for, flash, make_response from", "DB if getUserID(login_session['email']) is not None: login_session['user_id'] = getUserID(login_session['email']) else:", "= session.query(Category).filter_by(name=category).first() editedItem = session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() categories = session.query(Category).all()", "token access_token = result['access_token'] url = 'https://graph.facebook.com/v2.11/me?access_token={}&fields=' \\ 'name,id,email,picture'.format(access_token) #", "login_session['facebook_id'] del login_session['access_token'] del login_session['username'] del login_session['picture'] del login_session['email'] del", "= request.form['name'] if request.form['description']: editedItem.description = request.form['description'] if request.form['category']: category", "Strip expire tag from access token access_token = result['access_token'] url", "getUserID(login_session['email']) else: createUser(login_session) login_session['user_id'] = getUserID(login_session['email']) # Stores token in", "redirect('/login') categories = session.query(Category).all() user = session.query(User).filter_by(email=login_session['email']).one() if request.method ==", "login_session['provider'] = 'facebook' login_session['username'] = data['name'] login_session['email'] = data['email'] login_session['picture']", "# Upgrade auth code into credentials object oauth_flow = flow_from_clientsecrets('client_secrets.json',", "CLIENT_ID = json.loads(open('client_secrets.json', 'r') .read())['web']['client_id'] APP_ID = json.loads(open('fb_client_secrets.json', 'r') .read())['web']['app_id']", "return render_template('newcategoryitem.html', categories=categories) # Edit a category item @app.route('/catalog/<category>/<item>/edit', methods=['GET',", "import Flask, render_template, request, redirect, jsonify, g from flask import", "in DB if getUserID(login_session['email']) is not None: login_session['user_id'] = getUserID(login_session['email'])", "= \"width: 300px; height: 300px; border-radius: 150px;' \\ '-webkit-border-radius: 150px;-moz-border-radius:", "= DBSession() # Login handler @app.route('/login') def showLogin(): \"\"\"JSON API", "result = json.loads(h.request(url, 'GET')[1]) # Get user info data =", "flash(\"You have been successfully logged out.\") return redirect(url_for('showCatalog')) else: flash(\"No", "= User(username=login_session['username'], email=login_session['email'], picture=login_session['picture']) session.add(newUser) session.commit() def getUserID(email): try: user", "'token' not in login_session: return render_template('publiccategoryitem.html', item=item, category=category, categories=categories) return", "try: user = session.query(User).filter_by(email=email).one() return user.id except: return None #", 
"session.query(Category).all() if user.id != editedItem.user_id: flash('You are not authorized to", "session.add(newUser) session.commit() def getUserID(email): try: user = session.query(User).filter_by(email=email).one() return user.id", "= session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() if user.id != itemToDelete.user_id: flash('You are", "return redirect(url_for('showCategoryItem', category=request.form['category'], item=editedItem.name)) else: return render_template('editcategoryitem.html', category=categoryItem.name, item=editedItem.name, categories=categories,", "info data = result login_session['access_token'] = access_token login_session['provider'] = 'facebook'", "token access_token = credentials.access_token url = 'https://www.googleapis.com/oauth2/v1/tokeninfo?' \\ 'access_token={}'.format(access_token) h", "items=items, category=itemCategory, categories=categories) else: return render_template('category.html', items=items, category=itemCategory, categories=categories) #", "= session.query(Item).filter_by(name=item, category_id=category.id).first() categories = session.query(Category).all() if 'token' not in", "= request.data try: # Upgrade auth code into credentials object", "# JSON APIs to view Category Information. @app.route('/catalog/JSON') def catalogJSON():", "= data['email'] login_session['picture'] = data['picture']['data']['url'] login_session['facebook_id'] = data['id'] # Checks", "session.query(Category).filter_by(name=category).first() editedItem = session.query(Item).filter_by( name=item, category_id=categoryItem.id).first() categories = session.query(Category).all() if", "if 'token' not in login_session: return render_template('publiccatalog.html', categories=categories, items=items) else:", "= json.loads(h.request(url, 'GET')[1]) # Access token error handling if result.get('error')", "Store access token in session login_session['provider'] = 'google' login_session['access_token'] =", "return render_template('catalog.html', categories=categories, items=items) # Show Items in a category", "category=category, categories=categories) return render_template('categoryitem.html', item=item, category=category, categories=categories) # Create a", "= Item(name=request.form['name'], description=request.form['description'], category_id=category.id, user_id=user.id) session.add(newItem) session.commit() flash('New Item {}", "access token access_token = credentials.access_token url = 'https://www.googleapis.com/oauth2/v1/tokeninfo?' \\ 'access_token={}'.format(access_token)", "= credentials.id_token['sub'] # Get user info userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo' params", "'name,id,email,picture'.format(access_token) # noqa h = httplib2.Http() result = json.loads(h.request(url, 'GET')[1])", "categories=categories) # Edit a category item @app.route('/catalog/<category>/<item>/edit', methods=['GET', 'POST']) def", "return render_template('editcategoryitem.html', category=categoryItem.name, item=editedItem.name, categories=categories, editedItem=editedItem) # Delete a category", "== 'google': del login_session['gplus_id'] if login_session['provider'] == 'facebook': del login_session['facebook_id']", "def deleteCategoryItem(category, item): if 'token' not in login_session: return redirect('/login')" ]
[ "import STORE, FileStore, Store __all__ = ( \"FileStore\", \"NooException\", \"Store\",", "from .echo import echo, set_quiet from .errors import NooException, cancel", "FileStore, Store __all__ = ( \"FileStore\", \"NooException\", \"Store\", \"STORE\", \"cancel\",", "import NooException, cancel from .store import STORE, FileStore, Store __all__", ".store import STORE, FileStore, Store __all__ = ( \"FileStore\", \"NooException\",", "cancel from .store import STORE, FileStore, Store __all__ = (", "from .store import STORE, FileStore, Store __all__ = ( \"FileStore\",", "NooException, cancel from .store import STORE, FileStore, Store __all__ =", "set_quiet from .errors import NooException, cancel from .store import STORE,", "from .errors import NooException, cancel from .store import STORE, FileStore,", ".echo import echo, set_quiet from .errors import NooException, cancel from", "__all__ = ( \"FileStore\", \"NooException\", \"Store\", \"STORE\", \"cancel\", \"echo\", \"set_quiet\",", "= ( \"FileStore\", \"NooException\", \"Store\", \"STORE\", \"cancel\", \"echo\", \"set_quiet\", )", "STORE, FileStore, Store __all__ = ( \"FileStore\", \"NooException\", \"Store\", \"STORE\",", ".errors import NooException, cancel from .store import STORE, FileStore, Store", "import echo, set_quiet from .errors import NooException, cancel from .store", "echo, set_quiet from .errors import NooException, cancel from .store import", "Store __all__ = ( \"FileStore\", \"NooException\", \"Store\", \"STORE\", \"cancel\", \"echo\"," ]
[ "return self._active_event.cv2img def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height)) def read_buffer_image(buf,", "[] self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False) self.port = port self.last_rate_timestamp = time.time() self.frame_counter", "= 1.0 / (camera_far_plane - camera_near_plane) elif depth_format == DepthFormat.Millimeters:", "a in enumerate(metadata['agents']): e = Event(a) image_mapping = dict( image=e.add_image,", "= read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs) def add_image_ids(self, image_ids_data): self.instance_segmentation_frame =", "os.path.join( os.path.dirname(os.path.abspath(__file__)), '..', 'templates'))) self.image_buffer = None self.app = app", "= self.metadata['agent'] loc = agent_meta['position'] rotation = int(agent_meta['rotation']['y'] / 90.0)", "be retrieved from event.frame and encoded to an image format\")", "0) if depth_format == DepthFormat.Normalized: multiplier = 1.0 / (camera_far_plane", "rotation = int(agent_meta['rotation']['y'] / 90.0) horizon = int(round(agent_meta['cameraHorizon'])) return (int(loc['x']", "Flask, request, make_response, abort import werkzeug import werkzeug.serving import werkzeug.http", "pair of request/response queues. \"\"\" import json import logging import", "image_classes=e.add_image_classes, image_normals=e.add_image_normals, image_flows=e.add_image_flows ) for key in image_mapping.keys(): if key", "self.flow_frame = None self.color_to_object_id = {} self.object_id_to_color = {} self.instance_detections2D", "self.screen_height)) def add_image(self, image_data, **kwargs): self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height,", "cls = color_name simObj = False if '|' in cls:", "well as the metadata sent about each object \"\"\" def", "self.screen_height) depth_format = kwargs['depth_format'] image_depth_out = image_depth[:,:,0] + image_depth[:,:,1] /", "for Artificial Intelligence 2017 \"\"\" ai2thor.server Handles all communication with", "self.third_party_camera_frames = [] self.third_party_class_segmentation_frames = [] self.third_party_instance_segmentation_frames = [] self.third_party_depth_frames", "request/response queues. 
\"\"\" import json import logging import sys import", "@app.route('/train', methods=['post']) def train(): if request.headers['Content-Type'].split(';')[0] == 'multipart/form-data': form =", "self.instance_segmentation_frame is None: return MIN_DETECTION_LEN = 0 self.instance_detections2D = {}", "old_wfile = self.wfile self.wfile = BufferedIO(self.wfile) result = super(ThorRequestHandler, self).run_wsgi()", "return self.wfile.close() @property def closed(self): return self.wfile.closed class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def", "def cv2image(self): warnings.warn(\"Deprecated - please use event.cv2img\") return self.cv2img @property", "sent about each object \"\"\" def __init__(self, metadata): self.metadata =", "view[body_offset + 4: next_offset] i = next_offset headers = {}", "**kwargs) def add_third_party_image_depth(self, image_depth_data, **kwargs): self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs)) def add_third_party_image_normals(self, normals_data):", "self.app = app self.client_token = None self.subscriptions = [] self.app.config.update(PROPAGATE_EXCEPTIONS=False,", "width, height, flip_y=True, flip_x=False, dtype=np.uint8, flip_rb_colors=False): im_bytes = np.frombuffer(buf.tobytes(), dtype=dtype)", "= MultiAgentEvent(metadata['activeAgentId'], events) else: self.last_event = event = events[0] for", "None self.class_segmentation_frame = None self.class_detections2D = {} self.process_colors() self.process_visible_bounds2D() self.third_party_camera_frames", "self.third_party_depth_frames = [] self.third_party_normals_frames = [] self.third_party_flows_frames = [] self.events", "== DepthFormat.Millimeters: multiplier = 1000.0 image_depth = read_buffer_image( image_depth_data, self.screen_width,", "raw_headers = view[headers_offset: body_offset] body = view[body_offset + 4: next_offset]", "self.wfile.flush() def close(self): return self.wfile.close() @property def closed(self): return self.wfile.closed", "self.screen_width, self.screen_height) self.process_colors_ids() def add_third_party_image_ids(self, image_ids_data): self.third_party_instance_segmentation_frames.append(read_buffer_image(image_ids_data, self.screen_width, self.screen_height)) def", "DepthFormat.Millimeters: multiplier = 1000.0 image_depth = read_buffer_image( image_depth_data, self.screen_width, self.screen_height,", "super(ThorRequestHandler, self).run_wsgi() self.wfile = old_wfile return result class MultiAgentEvent(object): def", "image_depth = read_buffer_image(image_depth_data, self.screen_width, self.screen_height) depth_format = kwargs['depth_format'] image_depth_out =", "now = time.time() # rate = self.debug_frames_per_interval / float(now -", "= MultipartFormParser(request.get_data(), MultipartFormParser.get_boundary(request.headers)) metadata = json.loads(form.form['metadata'][0]) token = form.form['token'][0] else:", "self.screen_height = self._active_event.screen_height self.events = events self.third_party_camera_frames = [] #", "[] # XXX add methods for depth,sem_seg @property def cv2img(self):", "image_flows_data): self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height) def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data,", "events): self._active_event = events[active_agent_id] self.metadata = self._active_event.metadata self.screen_width = self._active_event.screen_width", "return_index and return_inverse: return 
unique, idx, inv elif return_index: return", "20.0 self.depth_format = depth_format self.add_depth_noise = add_depth_noise self.noise_indices = None", "form.form['token'][0] else: form = request metadata = json.loads(form.form['metadata']) token =", "RGB data can be retrieved from event.frame and encoded to", "image format\") return None def process_visible_bounds2D(self): if self.instance_detections2D and len(self.instance_detections2D)", "= queue_get(response_queue) if 'sequenceId' not in next_action: self.sequence_id += 1", "0, Normalized = 1, Millimeters = 2 class Server(object): def", "else: if ctype == 'text/plain' and 'charset' in ct_opts: body", "if sys.version_info.major < 3 \\ else np.frombuffer(buf, dtype=dtype) im =", "we are receiving frames for the action we sent self.sequence_id", "# Copyright Allen Institute for Artificial Intelligence 2017 \"\"\" ai2thor.server", "events[0] for img in form.files.get('image-thirdParty-camera', []): self.last_event.add_third_party_camera_image(img) request_queue.put_nowait(event) self.frame_counter +=", "self.class_segmentation_frame = read_buffer_image(image_classes_data, self.screen_width, self.screen_height) def add_third_party_image_classes(self, image_classes_data): self.third_party_class_segmentation_frames.append(read_buffer_image(image_classes_data, self.screen_width,", "if obj['objectType'] == object_type] def process_colors_ids(self): if self.instance_segmentation_frame is None:", "in self.metadata['objects']: obj['visibleBounds2D'] = (obj['visible'] and obj['objectId'] in self.instance_detections2D) def", "of request/response queues. \"\"\" import json import logging import sys", "= arr[idx] if return_index and return_inverse: return unique, idx, inv", "unique_masks = (np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1)) == np.arange(len(unique_ids))[:,", "request_queue.put_nowait(event) self.frame_counter += 1 next_action = queue_get(response_queue) if 'sequenceId' not", "{} self.instance_detections2D = None self.instance_masks = {} self.class_masks = {}", "class Server(object): def __init__( self, request_queue, response_queue, host, port=0, threaded=False,", "self.debug_frames_per_interval == 0: now = time.time() # rate = self.debug_frames_per_interval", "2017 \"\"\" ai2thor.server Handles all communication with Unity through a", "self.class_masks[cls] = np.logical_or(self.class_masks[cls], unique_masks[color_ind, ...]) def _image_depth(self, image_depth_data, **kwargs): image_depth", "in form.files: image_mapping[key](form.files[key][i]) third_party_image_mapping = dict( image=e.add_image, image_thirdParty_depth=lambda x: e.add_third_party_image_depth(", "height=300 ): app = Flask(__name__, template_folder=os.path.realpath( os.path.join( os.path.dirname(os.path.abspath(__file__)), '..', 'templates')))", "= None if add_depth_noise: assert width == height,\\ \"Noise supported", "metadata = json.loads(form.form['metadata'][0]) token = form.form['token'][0] else: form = request", "form.form['token'] if self.client_token and token != self.client_token: abort(403) if self.frame_counter", "return_inverse=True) else: _, idx = np.unique(b, return_index=True) unique = arr[idx]", "= 1000.0 image_depth = read_buffer_image( image_depth_data, self.screen_width, self.screen_height, **kwargs ).reshape(self.screen_height,", "class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def run_wsgi(self): old_wfile = self.wfile self.wfile = BufferedIO(self.wfile)", "= np.unique(b, return_index=True) unique = arr[idx] if 
return_index and return_inverse:", "in request_headers: if h == 'Content-Type': ctype, ct_opts = werkzeug.http.parse_options_header(value)", "cls: cls = cls.split('|')[0] simObj = True bb = np.array(color_bounds['bounds'])", "class Event(object): \"\"\" Object that is returned from a call", "event.frame and encoded to an image format\") return None def", "idx, inv elif return_index: return unique, idx elif return_inverse: return", "color_name simObj = False if '|' in cls: cls =", "= form.form['token'][0] else: form = request metadata = json.loads(form.form['metadata']) token", "except ImportError: from Queue import Empty import time import warnings", "= werkzeug.http.parse_options_header(value) boundary = ct_opts['boundary'].encode('ascii') return boundary return None def", "and self.metadata['colors']: for color_data in self.metadata['colors']: name = color_data['name'] c_key", "def cv2img(self): return self.frame[...,::-1] @property def pose(self): agent_meta = self.metadata['agent']", "agent_meta['position'] rotation = round(agent_meta['rotation']['y'] * 1000) horizon = round(agent_meta['cameraHorizon'] *", "arr = np.ascontiguousarray(arr).copy() b = arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1]))) if", "read_buffer_image(buf, width, height, flip_y=True, flip_x=False, dtype=np.uint8, flip_rb_colors=False): im_bytes = np.frombuffer(buf.tobytes(),", "depth_image_float def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs): multiplier = 1.0 camera_far_plane", "5000 # get with timeout to allow quit def queue_get(que):", "self.frame = None self.depth_frame = None self.normals_frame = None self.flow_frame", "len(events) > 1: self.last_event = event = MultiAgentEvent(metadata['activeAgentId'], events) else:", "[] self.third_party_depth_frames = [] self.third_party_normals_frames = [] self.third_party_flows_frames = []", "= read_buffer_image(image_ids_data, self.screen_width, self.screen_height) self.process_colors_ids() def add_third_party_image_ids(self, image_ids_data): self.third_party_instance_segmentation_frames.append(read_buffer_image(image_ids_data, self.screen_width,", "in next_action: self.sequence_id += 1 next_action['sequenceId'] = self.sequence_id else: self.sequence_id", "= request metadata = json.loads(form.form['metadata']) token = form.form['token'] if self.client_token", "in cls: cls = cls.split('|')[0] simObj = True bb =", "import warnings from flask import Flask, request, make_response, abort import", "werkzeug import werkzeug.serving import werkzeug.http import numpy as np from", "= None self.depth_frame = None self.normals_frame = None self.flow_frame =", "= super(ThorRequestHandler, self).run_wsgi() self.wfile = old_wfile return result class MultiAgentEvent(object):", "while i >= 0: next_offset = data.find(full_boundary, i + len(full_boundary))", "frames for the action we sent self.sequence_id = 0 self.last_event", "Empty except ImportError: from Queue import Empty import time import", "werkzeug.http import numpy as np from enum import Enum from", "= im[..., ::-1] return im def unique_rows(arr, return_index=False, return_inverse=False): arr", "obj['objectId'] in self.instance_detections2D) def process_colors(self): if 'colors' in self.metadata and", "self.client_token and token != self.client_token: abort(403) if self.frame_counter % self.debug_frames_per_interval", "add_image_ids(self, image_ids_data): self.instance_segmentation_frame = read_buffer_image(image_ids_data, self.screen_width, self.screen_height) self.process_colors_ids() 
def add_third_party_image_ids(self,", "self.subscriptions = [] self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False) self.port = port self.last_rate_timestamp =", "obj in self.metadata['objects']: obj['visibleBounds2D'] = (obj['visible'] and obj['objectId'] in self.instance_detections2D)", "self.sequence_id else: self.sequence_id = next_action['sequenceId'] resp = make_response(json.dumps(next_action, cls=NumpyAwareEncoder)) return", "def flush(self): self.wfile.write(b\"\".join(self.data)) self.wfile.flush() def close(self): return self.wfile.close() @property def", "= [] self.third_party_depth_frames = [] self.third_party_normals_frames = [] self.third_party_flows_frames =", "= None self.normals_frame = None self.flow_frame = None self.color_to_object_id =", "== DepthFormat.Millimeters: multiplier *= 1000 image_depth_out *= multiplier / 256.0", "= kwargs.pop('camera_far_plane', 1) camera_near_plane = kwargs.pop('camera_near_plane', 0) if depth_format ==", "if obj['objectId'] == object_id: return obj return None class MultipartFormParser(object):", "== object_type] def process_colors_ids(self): if self.instance_segmentation_frame is None: return MIN_DETECTION_LEN", "werkzeug.serving import werkzeug.http import numpy as np from enum import", "step_size = 0.25 agent_meta = self.metadata['agent'] loc = agent_meta['position'] rotation", "from queue import Empty except ImportError: from Queue import Empty", "1, 1)) == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis]) #for unique_color_ind, unique_color in", "self.files[cd_opts['name']] = [] self.files[cd_opts['name']].append(body) else: if ctype == 'text/plain' and", "[] self.class_detections2D[cls].append(bb) color_ind = np.argmin(np.sum(np.abs(unique_ids - color), axis=1)) if simObj:", "self.screen_width, indices=kwargs['noise_indices'] ) return depth_image_float def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs):", "len(full_boundary)) if next_offset < 0: break headers_offset = i +", "self, request_queue, response_queue, host, port=0, threaded=False, depth_format=DepthFormat.Meters, add_depth_noise=False, width=300, height=300", "add_image_normals(self, image_normals_data): self.normals_frame = read_buffer_image(image_normals_data, self.screen_width, self.screen_height) def add_third_party_image_flows(self, flows_data):", "request metadata = json.loads(form.form['metadata']) token = form.form['token'] if self.client_token and", "return unique, idx, inv elif return_index: return unique, idx elif", "image_depth = read_buffer_image( image_depth_data, self.screen_width, self.screen_height, **kwargs ).reshape(self.screen_height, self.screen_width) *", "key in image_mapping.keys(): if key in form.files: image_mapping[key](form.files[key][i]) third_party_image_mapping =", "DepthFormat(Enum): Meters = 0, Normalized = 1, Millimeters = 2", "res = que.get(block=True, timeout=0.5) break except Empty: pass return res", "- please use event.cv2img\") return self.cv2img @property def cv2img(self): return", "h == 'Content-Type': ctype, ct_opts = werkzeug.http.parse_options_header(value) boundary = ct_opts['boundary'].encode('ascii')", "= {} self.class_masks = {} self.instance_segmentation_frame = None self.class_segmentation_frame =", "self.third_party_instance_segmentation_frames = [] self.third_party_depth_frames = [] self.third_party_normals_frames = [] self.third_party_flows_frames", "queues. 
\"\"\" import json import logging import sys import os", "and len(self.instance_detections2D) > 0: for obj in self.metadata['objects']: obj['visibleBounds2D'] =", "read_buffer_image(image_classes_data, self.screen_width, self.screen_height) def add_third_party_image_classes(self, image_classes_data): self.third_party_class_segmentation_frames.append(read_buffer_image(image_classes_data, self.screen_width, self.screen_height)) def", "def close(self): return self.wfile.close() @property def closed(self): return self.wfile.closed class", "/ np.float32(256 ** 2) multiplier = 1.0 if depth_format !=", "def add_third_party_image_normals(self, normals_data): self.third_party_normals_frames.append(read_buffer_image(normals_data, self.screen_width, self.screen_height)) def add_image_normals(self, image_normals_data): self.normals_frame", "if depth_format != DepthFormat.Normalized: multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane'] elif", "import Enum from ai2thor.util.depth import apply_real_noise, generate_noise_indices logging.getLogger('werkzeug').setLevel(logging.ERROR) werkzeug.serving.WSGIRequestHandler.protocol_version =", "< MIN_DETECTION_LEN): if cls not in self.class_detections2D: self.class_detections2D[cls] = []", "screenshot that Unity captures as well as the metadata sent", "= BufferedIO(self.wfile) result = super(ThorRequestHandler, self).run_wsgi() self.wfile = old_wfile return", "width, -1) if flip_y: im = np.flip(im, axis=0) if flip_x:", "= depth_format self.add_depth_noise = add_depth_noise self.noise_indices = None if add_depth_noise:", "import sys import os import os.path try: from queue import", "- kwargs['camera_near_plane'] elif depth_format == DepthFormat.Millimeters: multiplier *= 1000 image_depth_out", "+ image_depth[:,:,2] / np.float32(256 ** 2) multiplier = 1.0 if", "= time.time() self.frame_counter = 0 self.debug_frames_per_interval = 50 self.xwindow_id =", "objects_by_type(self, object_type): return [obj for obj in self.metadata['objects'] if obj['objectType']", "(bb[3] - bb[1]) < MIN_DETECTION_LEN): if cls not in self.class_detections2D:", "np.asscalar(obj) return super(NumpyAwareEncoder, self).default(obj) class BufferedIO(object): def __init__(self, wfile): self.wfile", "as np from enum import Enum from ai2thor.util.depth import apply_real_noise,", "and 'charset' in ct_opts: body = body.tobytes().decode(ct_opts['charset']) if cd_opts['name'] not", "bb = np.array(color_bounds['bounds']) bb[[1,3]] = self.metadata['screenHeight'] - bb[[3,1]] if not((bb[2]", "return unique, inv else: return unique class Event(object): \"\"\" Object", "are receiving frames for the action we sent self.sequence_id =", "process_colors(self): if 'colors' in self.metadata and self.metadata['colors']: for color_data in", "arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1]))) if return_inverse: _, idx, inv =", "= [] self.third_party_flows_frames = [] self.events = [self] # Ensure", "queue_get(que): res = None while True: try: res = que.get(block=True,", "= int(agent_meta['rotation']['y'] / 90.0) horizon = int(round(agent_meta['cameraHorizon'])) return (int(loc['x'] /", "None self.class_detections2D = {} self.process_colors() self.process_visible_bounds2D() self.third_party_camera_frames = [] self.third_party_class_segmentation_frames", "unique_inverse = unique_rows(self.instance_segmentation_frame.reshape(-1, 3), return_inverse=True) unique_inverse = unique_inverse.reshape(self.instance_segmentation_frame.shape[:2]) unique_masks =", "image=e.add_image, 
image_depth=lambda x: e.add_image_depth( x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane, add_noise=self.add_depth_noise, noise_indices=self.noise_indices", "== 'text/plain' and 'charset' in ct_opts: body = body.tobytes().decode(ct_opts['charset']) if", "image_classes_data): self.third_party_class_segmentation_frames.append(read_buffer_image(image_classes_data, self.screen_width, self.screen_height)) def cv2image(self): warnings.warn(\"Deprecated - please use", "try: res = que.get(block=True, timeout=0.5) break except Empty: pass return", "cd_opts['name'] not in self.form: self.form[cd_opts['name']] = [] self.form[cd_opts['name']].append(body) class DepthFormat(Enum):", "x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane, add_noise=self.add_depth_noise, noise_indices=self.noise_indices ), image_ids=e.add_image_ids, image_classes=e.add_image_classes, image_normals=e.add_image_normals,", "* 1000), rotation, horizon) @property def pose_discrete(self): # XXX should", "self.port = port self.last_rate_timestamp = time.time() self.frame_counter = 0 self.debug_frames_per_interval", "def process_colors_ids(self): if self.instance_segmentation_frame is None: return MIN_DETECTION_LEN = 0", "process_visible_bounds2D(self): if self.instance_detections2D and len(self.instance_detections2D) > 0: for obj in", "= None self.color_to_object_id = {} self.object_id_to_color = {} self.instance_detections2D =", "**kwargs ).reshape(self.screen_height, self.screen_width) * multiplier self.depth_frame = image_depth.astype(np.float32) def add_image_depth(self,", "__init__(self, active_agent_id, events): self._active_event = events[active_agent_id] self.metadata = self._active_event.metadata self.screen_width", "idx = np.unique(b, return_index=True) unique = arr[idx] if return_index and", "os import os.path try: from queue import Empty except ImportError:", "horizon = round(agent_meta['cameraHorizon'] * 1000) return (round(loc['x'] * 1000), round(loc['z']", "self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height)) def read_buffer_image(buf, width, height, flip_y=True, flip_x=False, dtype=np.uint8,", "with timeout to allow quit def queue_get(que): res = None", "add_third_party_image_flows(self, flows_data): self.third_party_flows_frames.append(read_buffer_image(flows_data, self.screen_width, self.screen_height)) def add_image_flows(self, image_flows_data): self.flows_frame =", "warnings.warn(\"Deprecated - please use event.cv2img\") return self.cv2img @property def cv2img(self):", "MultiAgentEvent(object): def __init__(self, active_agent_id, events): self._active_event = events[active_agent_id] self.metadata =", "Unity through a Flask service. 
# Copyright Allen Institute for Artificial Intelligence 2017
"""
ai2thor.server

Handles all communication with Unity through a Flask service.  Messages are
sent to the controller using a pair of request/response queues.
"""
import json
import logging
import sys
import os
import os.path
try:
    from queue import Empty
except ImportError:
    from Queue import Empty
import time
import warnings

from flask import Flask, request, make_response, abort
import werkzeug
import werkzeug.serving
import werkzeug.http
import numpy as np
from enum import Enum

from ai2thor.util.depth import apply_real_noise, generate_noise_indices

logging.getLogger('werkzeug').setLevel(logging.ERROR)
werkzeug.serving.WSGIRequestHandler.protocol_version = 'HTTP/1.1'

MAX_DEPTH = 5000


# get with timeout to allow quit
def queue_get(que):
    res = None
    while True:
        try:
            res = que.get(block=True, timeout=0.5)
            break
        except Empty:
            pass
    return res


class NumpyAwareEncoder(json.JSONEncoder):
    def default(self, obj):
        if isinstance(obj, np.generic):
            return np.asscalar(obj)
        return super(NumpyAwareEncoder, self).default(obj)


class BufferedIO(object):
    def __init__(self, wfile):
        self.wfile = wfile
        self.data = []

    def write(self, output):
        self.data.append(output)

    def flush(self):
        self.wfile.write(b"".join(self.data))
        self.wfile.flush()

    def close(self):
        return self.wfile.close()

    @property
    def closed(self):
        return self.wfile.closed


class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler):
    def run_wsgi(self):
        old_wfile = self.wfile
        self.wfile = BufferedIO(self.wfile)
        result = super(ThorRequestHandler, self).run_wsgi()
        self.wfile = old_wfile
        return result
class MultiAgentEvent(object):

    def __init__(self, active_agent_id, events):
        self._active_event = events[active_agent_id]
        self.metadata = self._active_event.metadata
        self.screen_width = self._active_event.screen_width
        self.screen_height = self._active_event.screen_height
        self.events = events
        self.third_party_camera_frames = []
        # XXX add methods for depth,sem_seg

    @property
    def cv2img(self):
        return self._active_event.cv2img

    def add_third_party_camera_image(self, third_party_image_data):
        self.third_party_camera_frames.append(
            read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))


def read_buffer_image(buf, width, height, flip_y=True, flip_x=False,
                      dtype=np.uint8, flip_rb_colors=False):
    im_bytes = np.frombuffer(buf.tobytes(), dtype=dtype) if sys.version_info.major < 3 \
        else np.frombuffer(buf, dtype=dtype)
    im = im_bytes.reshape(height, width, -1)
    if flip_y:
        im = np.flip(im, axis=0)
    if flip_x:
        im = np.flip(im, axis=1)
    if flip_rb_colors:
        im = im[..., ::-1]
    return im


def unique_rows(arr, return_index=False, return_inverse=False):
    arr = np.ascontiguousarray(arr).copy()
    b = arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1])))
    if return_inverse:
        _, idx, inv = np.unique(b, return_index=True, return_inverse=True)
    else:
        _, idx = np.unique(b, return_index=True)
    unique = arr[idx]
    if return_index and return_inverse:
        return unique, idx, inv
    elif return_index:
        return unique, idx
    elif return_inverse:
        return unique, inv
    else:
        return unique
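# Illustrative sketch (not part of the original module): decode a raw 2x2 RGB
# buffer the way Unity frames are decoded, then use unique_rows() to recover
# the distinct colors and per-color masks, mirroring what process_colors_ids()
# does with a real instance-segmentation frame. The pixel values are made up.
def _sketch_read_and_unique():
    # Two red pixels and two blue pixels, row-major RGB bytes.
    buf = bytes([255, 0, 0, 255, 0, 0,
                 0, 0, 255, 0, 0, 255])
    im = read_buffer_image(buf, width=2, height=2)  # flip_y=True by default
    assert im.shape == (2, 2, 3)

    unique_ids, unique_inverse = unique_rows(im.reshape(-1, 3), return_inverse=True)
    assert len(unique_ids) == 2  # red and blue
    # Each pixel's row in unique_ids; the reshape mirrors process_colors_ids().
    masks = unique_inverse.reshape(-1) == np.arange(len(unique_ids))[:, np.newaxis]
    assert masks.sum() == 4  # every pixel belongs to exactly one color mask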
class Event(object):
    """
    Object that is returned from a call to controller.step().
    This class wraps the screenshot that Unity captures as well as the
    metadata sent about each object.
    """

    def __init__(self, metadata):
        self.metadata = metadata
        self.screen_width = metadata['screenWidth']
        self.screen_height = metadata['screenHeight']

        self.frame = None
        self.depth_frame = None
        self.normals_frame = None
        self.flows_frame = None  # matches the attribute written by add_image_flows()

        self.color_to_object_id = {}
        self.object_id_to_color = {}

        self.instance_detections2D = None
        self.instance_masks = {}
        self.class_masks = {}

        self.instance_segmentation_frame = None
        self.class_segmentation_frame = None

        self.class_detections2D = {}

        self.process_colors()
        self.process_visible_bounds2D()

        self.third_party_camera_frames = []
        self.third_party_class_segmentation_frames = []
        self.third_party_instance_segmentation_frames = []
        self.third_party_depth_frames = []
        self.third_party_normals_frames = []
        self.third_party_flows_frames = []

        self.events = [self]  # Ensure we have a similar API to MultiAgentEvent

    @property
    def image_data(self):
        warnings.warn(
            "Event.image_data has been removed - RGB data can be retrieved "
            "from event.frame and encoded to an image format")
        return None

    def process_visible_bounds2D(self):
        if self.instance_detections2D and len(self.instance_detections2D) > 0:
            for obj in self.metadata['objects']:
                obj['visibleBounds2D'] = (obj['visible'] and
                                          obj['objectId'] in self.instance_detections2D)

    def process_colors(self):
        if 'colors' in self.metadata and self.metadata['colors']:
            for color_data in self.metadata['colors']:
                name = color_data['name']
                c_key = tuple(color_data['color'])
                self.color_to_object_id[c_key] = name
                self.object_id_to_color[name] = c_key

    def objects_by_type(self, object_type):
        return [obj for obj in self.metadata['objects'] if obj['objectType'] == object_type]

    def process_colors_ids(self):
        if self.instance_segmentation_frame is None:
            return

        MIN_DETECTION_LEN = 0

        self.instance_detections2D = {}
        unique_ids, unique_inverse = unique_rows(
            self.instance_segmentation_frame.reshape(-1, 3), return_inverse=True)
        unique_inverse = unique_inverse.reshape(self.instance_segmentation_frame.shape[:2])
        unique_masks = (np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1))
                        == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis])
        #for unique_color_ind, unique_color in enumerate(unique_ids):
        for color_bounds in self.metadata['colorBounds']:
            color = np.array(color_bounds['color'])
            color_name = self.color_to_object_id.get(
                tuple(int(cc) for cc in color), 'background')
            cls = color_name
            simObj = False
            if '|' in cls:
                cls = cls.split('|')[0]
                simObj = True

            bb = np.array(color_bounds['bounds'])
            bb[[1, 3]] = self.metadata['screenHeight'] - bb[[3, 1]]

            if not ((bb[2] - bb[0]) < MIN_DETECTION_LEN or (bb[3] - bb[1]) < MIN_DETECTION_LEN):
                if cls not in self.class_detections2D:
                    self.class_detections2D[cls] = []
                self.class_detections2D[cls].append(bb)

                color_ind = np.argmin(np.sum(np.abs(unique_ids - color), axis=1))
                if simObj:
                    self.instance_detections2D[color_name] = bb
                    self.instance_masks[color_name] = unique_masks[color_ind, ...]

                if cls not in self.class_masks:
                    self.class_masks[cls] = unique_masks[color_ind, ...]
                else:
                    self.class_masks[cls] = np.logical_or(
                        self.class_masks[cls], unique_masks[color_ind, ...])

    def _image_depth(self, image_depth_data, **kwargs):
        image_depth = read_buffer_image(image_depth_data, self.screen_width, self.screen_height)
        depth_format = kwargs['depth_format']
        image_depth_out = (image_depth[:, :, 0]
                           + image_depth[:, :, 1] / np.float32(256)
                           + image_depth[:, :, 2] / np.float32(256 ** 2))
        multiplier = 1.0
        if depth_format != DepthFormat.Normalized:
            multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane']
        # A separate `if` here: written as `elif` the Millimeters branch could
        # never run, since Millimeters != Normalized already takes the branch above.
        if depth_format == DepthFormat.Millimeters:
            multiplier *= 1000
        image_depth_out *= multiplier / 256.0
        depth_image_float = image_depth_out.astype(np.float32)
        if 'add_noise' in kwargs and kwargs['add_noise']:
            depth_image_float = apply_real_noise(
                depth_image_float, self.screen_width, indices=kwargs['noise_indices'])
        return depth_image_float

    def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs):
        multiplier = 1.0
        camera_far_plane = kwargs.pop('camera_far_plane', 1)
        camera_near_plane = kwargs.pop('camera_near_plane', 0)
        if depth_format == DepthFormat.Normalized:
            multiplier = 1.0 / (camera_far_plane - camera_near_plane)
        elif depth_format == DepthFormat.Millimeters:
            multiplier = 1000.0
        image_depth = read_buffer_image(
            image_depth_data, self.screen_width, self.screen_height, **kwargs
        ).reshape(self.screen_height, self.screen_width) * multiplier
        self.depth_frame = image_depth.astype(np.float32)

    def add_image_depth(self, image_depth_data, **kwargs):
        self.depth_frame = self._image_depth(image_depth_data, **kwargs)

    def add_third_party_image_depth(self, image_depth_data, **kwargs):
        self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs))

    def add_third_party_image_normals(self, normals_data):
        self.third_party_normals_frames.append(
            read_buffer_image(normals_data, self.screen_width, self.screen_height))

    def add_image_normals(self, image_normals_data):
        self.normals_frame = read_buffer_image(
            image_normals_data, self.screen_width, self.screen_height)

    def add_third_party_image_flows(self, flows_data):
        self.third_party_flows_frames.append(
            read_buffer_image(flows_data, self.screen_width, self.screen_height))

    def add_image_flows(self, image_flows_data):
        self.flows_frame = read_buffer_image(
            image_flows_data, self.screen_width, self.screen_height)

    def add_third_party_camera_image(self, third_party_image_data):
        self.third_party_camera_frames.append(
            read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))

    def add_image(self, image_data, **kwargs):
        self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs)

    def add_image_ids(self, image_ids_data):
        self.instance_segmentation_frame = read_buffer_image(
            image_ids_data, self.screen_width, self.screen_height)
        self.process_colors_ids()

    def add_third_party_image_ids(self, image_ids_data):
        self.third_party_instance_segmentation_frames.append(
            read_buffer_image(image_ids_data, self.screen_width, self.screen_height))

    def add_image_classes(self, image_classes_data):
        self.class_segmentation_frame = read_buffer_image(
            image_classes_data, self.screen_width, self.screen_height)

    def add_third_party_image_classes(self, image_classes_data):
        self.third_party_class_segmentation_frames.append(
            read_buffer_image(image_classes_data, self.screen_width, self.screen_height))

    def cv2image(self):
        warnings.warn("Deprecated - please use event.cv2img")
        return self.cv2img

    @property
    def cv2img(self):
        return self.frame[..., ::-1]

    @property
    def pose(self):
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = round(agent_meta['rotation']['y'] * 1000)
        horizon = round(agent_meta['cameraHorizon'] * 1000)
        return (round(loc['x'] * 1000), round(loc['z'] * 1000), rotation, horizon)

    @property
    def pose_discrete(self):
        # XXX should have this as a parameter
        step_size = 0.25
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = int(agent_meta['rotation']['y'] / 90.0)
        horizon = int(round(agent_meta['cameraHorizon']))
        return (int(loc['x'] / step_size), int(loc['z'] / step_size), rotation, horizon)

    def get_object(self, object_id):
        for obj in self.metadata['objects']:
            if obj['objectId'] == object_id:
                return obj
        return None
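# Illustrative sketch (not part of the original module): the depth image
# arrives with a 24-bit value packed across the R, G and B channels, which
# Event._image_depth() unpacks as R + G/256 + B/256**2 and then scales by
# multiplier/256. The round trip below uses the Server defaults assumed here
# (near=0.1, far=20.0) and DepthFormat.Meters; the pixel values are made up.
def _sketch_depth_decoding():
    near, far = 0.1, 20.0
    rgb = np.zeros((4, 4, 3), dtype=np.uint8)
    rgb[..., 0] = 128  # most significant byte of the packed depth value

    packed = (rgb[..., 0]
              + rgb[..., 1] / np.float32(256)
              + rgb[..., 2] / np.float32(256 ** 2))
    depth_m = packed * (far - near) / 256.0  # DepthFormat.Meters scaling
    # 128/256 of the near..far range:
    assert np.allclose(depth_m, (far - near) / 2.0)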
class MultipartFormParser(object):

    @staticmethod
    def get_boundary(request_headers):
        for h, value in request_headers:
            if h == 'Content-Type':
                ctype, ct_opts = werkzeug.http.parse_options_header(value)
                boundary = ct_opts['boundary'].encode('ascii')
                return boundary
        return None

    def __init__(self, data, boundary):
        self.form = {}
        self.files = {}

        full_boundary = b'\r\n--' + boundary
        view = memoryview(data)
        i = data.find(full_boundary)
        while i >= 0:
            next_offset = data.find(full_boundary, i + len(full_boundary))
            if next_offset < 0:
                break
            headers_offset = i + len(full_boundary) + 2
            body_offset = data.find(b'\r\n\r\n', headers_offset)
            raw_headers = view[headers_offset: body_offset]
            body = view[body_offset + 4: next_offset]
            i = next_offset

            headers = {}
            for header in raw_headers.tobytes().decode('ascii').strip().split("\r\n"):
                k, v = header.split(':')
                headers[k.strip()] = v.strip()

            ctype, ct_opts = werkzeug.http.parse_options_header(headers['Content-Type'])
            cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition'])
            assert cdisp == 'form-data'

            if 'filename' in cd_opts:
                if cd_opts['name'] not in self.files:
                    self.files[cd_opts['name']] = []
                self.files[cd_opts['name']].append(body)
            else:
                if ctype == 'text/plain' and 'charset' in ct_opts:
                    body = body.tobytes().decode(ct_opts['charset'])
                if cd_opts['name'] not in self.form:
                    self.form[cd_opts['name']] = []
                self.form[cd_opts['name']].append(body)


class DepthFormat(Enum):
    Meters = 0
    Normalized = 1
    Millimeters = 2
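# Illustrative sketch (not part of the original module): a hand-built
# multipart body in the shape Unity posts to /train, fed through
# MultipartFormParser. The boundary and field values here are made up.
def _sketch_multipart_parse():
    boundary = b"XYZ"
    payload = (b"\r\n--XYZ\r\n"
               b"Content-Type: text/plain; charset=utf-8\r\n"
               b"Content-disposition: form-data; name=\"metadata\"\r\n"
               b"\r\n"
               b"{\"sequenceId\": 0}"
               b"\r\n--XYZ--\r\n")
    parser = MultipartFormParser(payload, boundary)
    # text/plain parts are decoded with their charset and collected per name.
    assert parser.form["metadata"] == ['{"sequenceId": 0}']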
*= multiplier /", "self.app, threaded=threaded, request_handler=ThorRequestHandler) # used to ensure that we are", "% self.debug_frames_per_interval == 0: now = time.time() # rate =", "self.color_to_object_id = {} self.object_id_to_color = {} self.instance_detections2D = None self.instance_masks", "= kwargs['camera_far_plane'] - kwargs['camera_near_plane'] elif depth_format == DepthFormat.Millimeters: multiplier *=", "%s/s\" % (datetime.datetime.now().isoformat(), rate)) if metadata['sequenceId'] != self.sequence_id: raise ValueError(\"Sequence", "+= 1 next_action['sequenceId'] = self.sequence_id else: self.sequence_id = next_action['sequenceId'] resp", "arr.shape[1]))) if return_inverse: _, idx, inv = np.unique(b, return_index=True, return_inverse=True)", "depth_image_float = image_depth_out.astype(np.float32) if 'add_noise' in kwargs and kwargs['add_noise']: depth_image_float", "boundary view = memoryview(data) i = data.find(full_boundary) while i >=", "width=300, height=300 ): app = Flask(__name__, template_folder=os.path.realpath( os.path.join( os.path.dirname(os.path.abspath(__file__)), '..',", "This class wraps the screenshot that Unity captures as well", "self.screen_width, self.screen_height, **kwargs ).reshape(self.screen_height, self.screen_width) * multiplier self.depth_frame = image_depth.astype(np.float32)", "Unity captures as well as the metadata sent about each", "color_name = self.color_to_object_id.get(tuple(int(cc) for cc in color), 'background') cls =", "def add_image_depth(self, image_depth_data, **kwargs): self.depth_frame = self._image_depth(image_depth_data, **kwargs) def add_third_party_image_depth(self,", "height, flip_y=True, flip_x=False, dtype=np.uint8, flip_rb_colors=False): im_bytes = np.frombuffer(buf.tobytes(), dtype=dtype) if", "im = im[..., ::-1] return im def unique_rows(arr, return_index=False, return_inverse=False):", "if ctype == 'text/plain' and 'charset' in ct_opts: body =", "), image_thirdParty_image_ids=e.add_third_party_image_ids, image_thirdParty_classes=e.add_third_party_image_classes, image_thirdParty_normals=e.add_third_party_image_normals, image_thirdParty_flows=e.add_third_party_image_flows ) if a['thirdPartyCameras'] is not", "= view[body_offset + 4: next_offset] i = next_offset headers =", "{} self.process_colors() self.process_visible_bounds2D() self.third_party_camera_frames = [] self.third_party_class_segmentation_frames = [] self.third_party_instance_segmentation_frames", "object \"\"\" def __init__(self, metadata): self.metadata = metadata self.screen_width =", "def process_visible_bounds2D(self): if self.instance_detections2D and len(self.instance_detections2D) > 0: for obj", "elif depth_format == DepthFormat.Millimeters: multiplier = 1000.0 image_depth = read_buffer_image(", "in self.files: self.files[cd_opts['name']] = [] self.files[cd_opts['name']].append(body) else: if ctype ==", "= None self.class_segmentation_frame = None self.class_detections2D = {} self.process_colors() self.process_visible_bounds2D()", "= [] self.files[cd_opts['name']].append(body) else: if ctype == 'text/plain' and 'charset'", "image_depth=lambda x: e.add_image_depth( x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane, add_noise=self.add_depth_noise, noise_indices=self.noise_indices ),", "if cd_opts['name'] not in self.form: self.form[cd_opts['name']] = [] self.form[cd_opts['name']].append(body) class", "camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane ), 
image_thirdParty_image_ids=e.add_third_party_image_ids, image_thirdParty_classes=e.add_third_party_image_classes, image_thirdParty_normals=e.add_third_party_image_normals, image_thirdParty_flows=e.add_third_party_image_flows ) if a['thirdPartyCameras']", "1.0 if depth_format != DepthFormat.Normalized: multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane']", "self.color_to_object_id[c_key] = name self.object_id_to_color[name] = c_key def objects_by_type(self, object_type): return", "elif depth_format == DepthFormat.Millimeters: multiplier *= 1000 image_depth_out *= multiplier", "< 3 \\ else np.frombuffer(buf, dtype=dtype) im = im_bytes.reshape(height, width,", "None self.app = app self.client_token = None self.subscriptions = []", "= read_buffer_image(image_normals_data, self.screen_width, self.screen_height) def add_third_party_image_flows(self, flows_data): self.third_party_flows_frames.append(read_buffer_image(flows_data, self.screen_width, self.screen_height))", "depth_format == DepthFormat.Normalized: multiplier = 1.0 / (camera_far_plane - camera_near_plane)", "self.wfile = BufferedIO(self.wfile) result = super(ThorRequestHandler, self).run_wsgi() self.wfile = old_wfile", "numpy as np from enum import Enum from ai2thor.util.depth import", "if self.instance_detections2D and len(self.instance_detections2D) > 0: for obj in self.metadata['objects']:", "= [self] # Ensure we have a similar API to", "dict( image=e.add_image, image_depth=lambda x: e.add_image_depth( x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane, add_noise=self.add_depth_noise,", "que.get(block=True, timeout=0.5) break except Empty: pass return res class NumpyAwareEncoder(json.JSONEncoder):", "controller using a pair of request/response queues. 
\"\"\" import json", "return self.wfile.closed class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def run_wsgi(self): old_wfile = self.wfile self.wfile", "DepthFormat.Normalized: multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane'] elif depth_format == DepthFormat.Millimeters:", "in self.class_detections2D: self.class_detections2D[cls] = [] self.class_detections2D[cls].append(bb) color_ind = np.argmin(np.sum(np.abs(unique_ids -", "or (bb[3] - bb[1]) < MIN_DETECTION_LEN): if cls not in", "from flask import Flask, request, make_response, abort import werkzeug import", "depth,sem_seg @property def cv2img(self): return self._active_event.cv2img def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data,", "methods=['get']) def ping(): return 'pong' @app.route('/train', methods=['post']) def train(): if", "metadata['sequenceId'] != self.sequence_id: raise ValueError(\"Sequence id mismatch: %s vs %s\"", "request_handler=ThorRequestHandler) # used to ensure that we are receiving frames", "id mismatch: %s vs %s\" % ( metadata['sequenceId'], self.sequence_id)) events", "inv = np.unique(b, return_index=True, return_inverse=True) else: _, idx = np.unique(b,", "self.screen_height)) def read_buffer_image(buf, width, height, flip_y=True, flip_x=False, dtype=np.uint8, flip_rb_colors=False): im_bytes", "name self.object_id_to_color[name] = c_key def objects_by_type(self, object_type): return [obj for", "None: for ti, t in enumerate(a['thirdPartyCameras']): for key in third_party_image_mapping.keys():", "data.find(full_boundary, i + len(full_boundary)) if next_offset < 0: break headers_offset", "self.client_token = None self.subscriptions = [] self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False) self.port =", "self.wfile = wfile self.data = [] def write(self, output): self.data.append(output)", "= read_buffer_image(image_flows_data, self.screen_width, self.screen_height) def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))", "next_action: self.sequence_id += 1 next_action['sequenceId'] = self.sequence_id else: self.sequence_id =", "np.logical_or(self.class_masks[cls], unique_masks[color_ind, ...]) def _image_depth(self, image_depth_data, **kwargs): image_depth = read_buffer_image(image_depth_data,", "if add_depth_noise: assert width == height,\\ \"Noise supported with square", "camera_near_plane = kwargs.pop('camera_near_plane', 0) if depth_format == DepthFormat.Normalized: multiplier =", "and kwargs['add_noise']: depth_image_float = apply_real_noise( depth_image_float, self.screen_width, indices=kwargs['noise_indices'] ) return", "unique_color in enumerate(unique_ids): for color_bounds in self.metadata['colorBounds']: color = np.array(color_bounds['color'])", "import werkzeug.serving import werkzeug.http import numpy as np from enum", "BufferedIO(object): def __init__(self, wfile): self.wfile = wfile self.data = []", "= {} self.files = {} full_boundary = b'\\r\\n--' + boundary", "= b'\\r\\n--' + boundary view = memoryview(data) i = data.find(full_boundary)", "self.instance_segmentation_frame = None self.class_segmentation_frame = None self.class_detections2D = {} self.process_colors()", "(len(unique_ids), 1, 1)) == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis]) #for unique_color_ind, unique_color", "1, Millimeters = 2 class 
Server(object): def __init__( self, request_queue,", "def closed(self): return self.wfile.closed class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def run_wsgi(self): old_wfile =", "1.0 camera_far_plane = kwargs.pop('camera_far_plane', 1) camera_near_plane = kwargs.pop('camera_near_plane', 0) if", "service. Messages are sent to the controller using a pair", "return np.asscalar(obj) return super(NumpyAwareEncoder, self).default(obj) class BufferedIO(object): def __init__(self, wfile):", "MultipartFormParser.get_boundary(request.headers)) metadata = json.loads(form.form['metadata'][0]) token = form.form['token'][0] else: form =", "assert width == height,\\ \"Noise supported with square dimension images", "next_offset] i = next_offset headers = {} for header in", "= False if '|' in cls: cls = cls.split('|')[0] simObj", "= now # import datetime # print(\"%s %s/s\" % (datetime.datetime.now().isoformat(),", "% (datetime.datetime.now().isoformat(), rate)) if metadata['sequenceId'] != self.sequence_id: raise ValueError(\"Sequence id", "bb self.instance_masks[color_name] = unique_masks[color_ind, ...] if cls not in self.class_masks:", "2 body_offset = data.find(b'\\r\\n\\r\\n', headers_offset) raw_headers = view[headers_offset: body_offset] body", "key in form.files: third_party_image_mapping[key](form.files[key][ti]) events.append(e) if len(events) > 1: self.last_event", "in self.metadata['colors']: name = color_data['name'] c_key = tuple(color_data['color']) self.color_to_object_id[c_key] =", "third_party_image_mapping.keys(): if key in form.files: third_party_image_mapping[key](form.files[key][ti]) events.append(e) if len(events) >", "def queue_get(que): res = None while True: try: res =", "data can be retrieved from event.frame and encoded to an", "# Ensure we have a similar API to MultiAgentEvent @property", "import logging import sys import os import os.path try: from", "rate = self.debug_frames_per_interval / float(now - self.last_rate_timestamp) self.last_rate_timestamp = now", "= {} self.instance_segmentation_frame = None self.class_segmentation_frame = None self.class_detections2D =", "flask import Flask, request, make_response, abort import werkzeug import werkzeug.serving", "= read_buffer_image( image_depth_data, self.screen_width, self.screen_height, **kwargs ).reshape(self.screen_height, self.screen_width) * multiplier", "1)) == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis]) #for unique_color_ind, unique_color in enumerate(unique_ids):", "parameter step_size = 0.25 agent_meta = self.metadata['agent'] loc = agent_meta['position']", "horizon) def get_object(self, object_id): for obj in self.metadata['objects']: if obj['objectId']", "np.flip(im, axis=1) if flip_rb_colors: im = im[..., ::-1] return im", "* multiplier self.depth_frame = image_depth.astype(np.float32) def add_image_depth(self, image_depth_data, **kwargs): self.depth_frame", "abort(403) if self.frame_counter % self.debug_frames_per_interval == 0: now = time.time()", "obj['objectId'] == object_id: return obj return None class MultipartFormParser(object): @staticmethod", "Empty: pass return res class NumpyAwareEncoder(json.JSONEncoder): def default(self, obj): if", "MIN_DETECTION_LEN = 0 self.instance_detections2D = {} unique_ids, unique_inverse = unique_rows(self.instance_segmentation_frame.reshape(-1,", "third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height)) def read_buffer_image(buf, width, height, flip_y=True, 
flip_x=False,", "def add_image(self, image_data, **kwargs): self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs)", "im[..., ::-1] return im def unique_rows(arr, return_index=False, return_inverse=False): arr =", "Ensure we have a similar API to MultiAgentEvent @property def", "(int(loc['x'] / step_size), int(loc['z'] / step_size), rotation, horizon) def get_object(self,", "simObj: self.instance_detections2D[color_name] = bb self.instance_masks[color_name] = unique_masks[color_ind, ...] if cls", "process_colors_ids(self): if self.instance_segmentation_frame is None: return MIN_DETECTION_LEN = 0 self.instance_detections2D", "image_depth_out.astype(np.float32) if 'add_noise' in kwargs and kwargs['add_noise']: depth_image_float = apply_real_noise(", "self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False) self.port = port self.last_rate_timestamp = time.time() self.frame_counter =", "Institute for Artificial Intelligence 2017 \"\"\" ai2thor.server Handles all communication", ") return depth_image_float def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs): multiplier =", "self.screen_width, self.screen_height)) def add_image_flows(self, image_flows_data): self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height)", "= i + len(full_boundary) + 2 body_offset = data.find(b'\\r\\n\\r\\n', headers_offset)", "def train(): if request.headers['Content-Type'].split(';')[0] == 'multipart/form-data': form = MultipartFormParser(request.get_data(), MultipartFormParser.get_boundary(request.headers))", "self.third_party_class_segmentation_frames = [] self.third_party_instance_segmentation_frames = [] self.third_party_depth_frames = [] self.third_party_normals_frames", "np.generic): return np.asscalar(obj) return super(NumpyAwareEncoder, self).default(obj) class BufferedIO(object): def __init__(self,", "warnings from flask import Flask, request, make_response, abort import werkzeug", "= kwargs['depth_format'] image_depth_out = image_depth[:,:,0] + image_depth[:,:,1] / np.float32(256) +", "@property def closed(self): return self.wfile.closed class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def run_wsgi(self): old_wfile", "pass return res class NumpyAwareEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj,", "boundary): self.form = {} self.files = {} full_boundary = b'\\r\\n--'", "read_buffer_image(image_flows_data, self.screen_width, self.screen_height) def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width, self.screen_height)) def", "headers_offset = i + len(full_boundary) + 2 body_offset = data.find(b'\\r\\n\\r\\n',", "form.files.get('image-thirdParty-camera', []): self.last_event.add_third_party_camera_image(img) request_queue.put_nowait(event) self.frame_counter += 1 next_action = queue_get(response_queue)", "a pair of request/response queues. \"\"\" import json import logging", "None self.depth_frame = None self.normals_frame = None self.flow_frame = None", "from a call to controller.step(). 
This class wraps the screenshot", "unique_inverse.reshape(self.instance_segmentation_frame.shape[:2]) unique_masks = (np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1)) ==", "self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height) def add_third_party_camera_image(self, third_party_image_data): self.third_party_camera_frames.append(read_buffer_image(third_party_image_data, self.screen_width,", "i = next_offset headers = {} for header in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"):", "np.newaxis, np.newaxis]) #for unique_color_ind, unique_color in enumerate(unique_ids): for color_bounds in", "def __init__(self, metadata): self.metadata = metadata self.screen_width = metadata['screenWidth'] self.screen_height", "import Flask, request, make_response, abort import werkzeug import werkzeug.serving import", "= image_depth_out.astype(np.float32) if 'add_noise' in kwargs and kwargs['add_noise']: depth_image_float =", "else: self.last_event = event = events[0] for img in form.files.get('image-thirdParty-camera',", "in image_mapping.keys(): if key in form.files: image_mapping[key](form.files[key][i]) third_party_image_mapping = dict(", "= self._image_depth(image_depth_data, **kwargs) def add_third_party_image_depth(self, image_depth_data, **kwargs): self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs)) def", "'colors' in self.metadata and self.metadata['colors']: for color_data in self.metadata['colors']: name", "self.instance_masks[color_name] = unique_masks[color_ind, ...] if cls not in self.class_masks: self.class_masks[cls]", "self._image_depth(image_depth_data, **kwargs) def add_third_party_image_depth(self, image_depth_data, **kwargs): self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs)) def add_third_party_image_normals(self,", "!= self.sequence_id: raise ValueError(\"Sequence id mismatch: %s vs %s\" %", "[] self.third_party_instance_segmentation_frames = [] self.third_party_depth_frames = [] self.third_party_normals_frames = []", "im = np.flip(im, axis=0) if flip_x: im = np.flip(im, axis=1)", "self.sequence_id = next_action['sequenceId'] resp = make_response(json.dumps(next_action, cls=NumpyAwareEncoder)) return resp def", "= (obj['visible'] and obj['objectId'] in self.instance_detections2D) def process_colors(self): if 'colors'", "headers = {} for header in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v = header.split(':')", "def get_boundary(request_headers): for h, value in request_headers: if h ==", "if 'add_noise' in kwargs and kwargs['add_noise']: depth_image_float = apply_real_noise( depth_image_float,", "!= self.client_token: abort(403) if self.frame_counter % self.debug_frames_per_interval == 0: now", "flip_rb_colors: im = im[..., ::-1] return im def unique_rows(arr, return_index=False,", "to an image format\") return None def process_visible_bounds2D(self): if self.instance_detections2D", "= ct_opts['boundary'].encode('ascii') return boundary return None def __init__(self, data, boundary):", "!= DepthFormat.Normalized: multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane'] elif depth_format ==", "MIN_DETECTION_LEN or (bb[3] - bb[1]) < MIN_DETECTION_LEN): if cls not", "if '|' in cls: cls = cls.split('|')[0] simObj = True", "1 next_action['sequenceId'] = self.sequence_id else: self.sequence_id = next_action['sequenceId'] resp =", "else: _, idx = np.unique(b, return_index=True) unique = arr[idx] if", ">= 0: 
next_offset = data.find(full_boundary, i + len(full_boundary)) if next_offset", "header in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v = header.split(':') headers[k.strip()] = v.strip() ctype,", "= next_offset headers = {} for header in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v", "in enumerate(a['thirdPartyCameras']): for key in third_party_image_mapping.keys(): if key in form.files:", "multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane'] elif depth_format == DepthFormat.Millimeters: multiplier", "= {} full_boundary = b'\\r\\n--' + boundary view = memoryview(data)", "self.instance_detections2D) def process_colors(self): if 'colors' in self.metadata and self.metadata['colors']: for", "cls.split('|')[0] simObj = True bb = np.array(color_bounds['bounds']) bb[[1,3]] = self.metadata['screenHeight']", "None self.subscriptions = [] self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False) self.port = port self.last_rate_timestamp", "self.screen_height)) def add_image_flows(self, image_flows_data): self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height) def", "1 next_action = queue_get(response_queue) if 'sequenceId' not in next_action: self.sequence_id", "Artificial Intelligence 2017 \"\"\" ai2thor.server Handles all communication with Unity", "logging.getLogger('werkzeug').setLevel(logging.ERROR) werkzeug.serving.WSGIRequestHandler.protocol_version = 'HTTP/1.1' MAX_DEPTH = 5000 # get with", "next_offset = data.find(full_boundary, i + len(full_boundary)) if next_offset < 0:", "self.last_rate_timestamp = time.time() self.frame_counter = 0 self.debug_frames_per_interval = 50 self.xwindow_id", "has been removed - RGB data can be retrieved from", "np.unique(b, return_index=True) unique = arr[idx] if return_index and return_inverse: return", "= form.form['token'] if self.client_token and token != self.client_token: abort(403) if", "self.third_party_flows_frames = [] self.events = [self] # Ensure we have", "if flip_rb_colors: im = im[..., ::-1] return im def unique_rows(arr,", "if len(events) > 1: self.last_event = event = MultiAgentEvent(metadata['activeAgentId'], events)", "def add_image_classes(self, image_classes_data): self.class_segmentation_frame = read_buffer_image(image_classes_data, self.screen_width, self.screen_height) def add_third_party_image_classes(self,", "cdisp == 'form-data' if 'filename' in cd_opts: if cd_opts['name'] not", "round(loc['z'] * 1000), rotation, horizon) @property def pose_discrete(self): # XXX", "next_offset < 0: break headers_offset = i + len(full_boundary) +", "None self.flow_frame = None self.color_to_object_id = {} self.object_id_to_color = {}", "werkzeug.http.parse_options_header(headers['Content-Type']) cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition']) assert cdisp == 'form-data' if", "self.screen_height)) def add_image_classes(self, image_classes_data): self.class_segmentation_frame = read_buffer_image(image_classes_data, self.screen_width, self.screen_height) def", "= 1, Millimeters = 2 class Server(object): def __init__( self,", "2 class Server(object): def __init__( self, request_queue, response_queue, host, port=0,", "request_queue, response_queue, host, port=0, threaded=False, depth_format=DepthFormat.Meters, add_depth_noise=False, width=300, height=300 ):", "if key in form.files: image_mapping[key](form.files[key][i]) third_party_image_mapping = dict( image=e.add_image, 
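
# Editor's sketch (not part of the upstream module): a minimal, hedged
# illustration of the two helpers above on tiny inputs.  It uses only the
# functions already defined in this file and is never called at import time.
def _example_buffer_helpers():
    # read_buffer_image: a 2x2 RGB image from a raw byte buffer; rows are
    # flipped vertically by default because Unity's origin is bottom-left.
    buf = bytes(range(12))  # 2 * 2 * 3 channel bytes
    im = read_buffer_image(buf, width=2, height=2)
    assert im.shape == (2, 2, 3)

    # unique_rows: deduplicate rows and recover the inverse mapping, the
    # same call pattern process_colors_ids uses to build per-color masks.
    arr = np.array([[1, 2, 3], [4, 5, 6], [1, 2, 3]])
    uniq, inv = unique_rows(arr, return_inverse=True)
    inv = np.asarray(inv).ravel()  # numpy versions disagree on inverse shape
    assert len(uniq) == 2
    assert (uniq[inv] == arr).all()  # the inverse reconstructs the original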
class Event(object):
    """
    Object that is returned from a call to controller.step().
    This class wraps the screenshot that Unity captures as well
    as the metadata sent about each object
    """

    def __init__(self, metadata):
        self.metadata = metadata
        self.screen_width = metadata['screenWidth']
        self.screen_height = metadata['screenHeight']

        self.frame = None
        self.depth_frame = None
        self.normals_frame = None
        # matches add_image_flows below; upstream initialized a never-read
        # 'flow_frame' attribute here
        self.flows_frame = None

        self.color_to_object_id = {}
        self.object_id_to_color = {}

        self.instance_detections2D = None
        self.instance_masks = {}
        self.class_masks = {}

        self.instance_segmentation_frame = None
        self.class_segmentation_frame = None

        self.class_detections2D = {}

        self.process_colors()
        self.process_visible_bounds2D()

        self.third_party_camera_frames = []
        self.third_party_class_segmentation_frames = []
        self.third_party_instance_segmentation_frames = []
        self.third_party_depth_frames = []
        self.third_party_normals_frames = []
        self.third_party_flows_frames = []

        self.events = [self]  # Ensure we have a similar API to MultiAgentEvent

    @property
    def image_data(self):
        warnings.warn(
            "Event.image_data has been removed - RGB data can be retrieved "
            "from event.frame and encoded to an image format")
        return None

    def process_visible_bounds2D(self):
        if self.instance_detections2D and len(self.instance_detections2D) > 0:
            for obj in self.metadata['objects']:
                obj['visibleBounds2D'] = (obj['visible'] and obj['objectId'] in self.instance_detections2D)

    def process_colors(self):
        if 'colors' in self.metadata and self.metadata['colors']:
            for color_data in self.metadata['colors']:
                name = color_data['name']
                c_key = tuple(color_data['color'])
                self.color_to_object_id[c_key] = name
                self.object_id_to_color[name] = c_key

    def objects_by_type(self, object_type):
        return [obj for obj in self.metadata['objects'] if obj['objectType'] == object_type]

    def process_colors_ids(self):
        if self.instance_segmentation_frame is None:
            return

        MIN_DETECTION_LEN = 0

        self.instance_detections2D = {}
        unique_ids, unique_inverse = unique_rows(
            self.instance_segmentation_frame.reshape(-1, 3), return_inverse=True)
        unique_inverse = unique_inverse.reshape(self.instance_segmentation_frame.shape[:2])
        unique_masks = (
            np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1))
            == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis])

        for color_bounds in self.metadata['colorBounds']:
            color = np.array(color_bounds['color'])
            color_name = self.color_to_object_id.get(tuple(int(cc) for cc in color), 'background')
            cls = color_name
            simObj = False
            if '|' in cls:
                cls = cls.split('|')[0]
                simObj = True

            bb = np.array(color_bounds['bounds'])
            # Unity reports bounds with a bottom-left origin; flip y
            bb[[1, 3]] = self.metadata['screenHeight'] - bb[[3, 1]]

            if not ((bb[2] - bb[0]) < MIN_DETECTION_LEN or (bb[3] - bb[1]) < MIN_DETECTION_LEN):
                if cls not in self.class_detections2D:
                    self.class_detections2D[cls] = []

                self.class_detections2D[cls].append(bb)

                color_ind = np.argmin(np.sum(np.abs(unique_ids - color), axis=1))
                if simObj:
                    self.instance_detections2D[color_name] = bb
                    self.instance_masks[color_name] = unique_masks[color_ind, ...]

                if cls not in self.class_masks:
                    self.class_masks[cls] = unique_masks[color_ind, ...]
                else:
                    self.class_masks[cls] = np.logical_or(
                        self.class_masks[cls], unique_masks[color_ind, ...])

    def _image_depth(self, image_depth_data, **kwargs):
        image_depth = read_buffer_image(image_depth_data, self.screen_width, self.screen_height)
        depth_format = kwargs['depth_format']
        # depth is packed into the R, G, B channels with decreasing weight
        image_depth_out = (
            image_depth[:, :, 0]
            + image_depth[:, :, 1] / np.float32(256)
            + image_depth[:, :, 2] / np.float32(256 ** 2))

        multiplier = 1.0
        if depth_format != DepthFormat.Normalized:
            multiplier = kwargs['camera_far_plane'] - kwargs['camera_near_plane']
        if depth_format == DepthFormat.Millimeters:
            # upstream used `elif` here, which the `!= Normalized` branch above
            # made unreachable; Millimeters must scale the metric range by 1000
            multiplier *= 1000

        image_depth_out *= multiplier / 256.0
        depth_image_float = image_depth_out.astype(np.float32)

        if 'add_noise' in kwargs and kwargs['add_noise']:
            depth_image_float = apply_real_noise(
                depth_image_float, self.screen_width, indices=kwargs['noise_indices'])

        return depth_image_float

    def add_image_depth_robot(self, image_depth_data, depth_format, **kwargs):
        multiplier = 1.0
        camera_far_plane = kwargs.pop('camera_far_plane', 1)
        camera_near_plane = kwargs.pop('camera_near_plane', 0)

        if depth_format == DepthFormat.Normalized:
            multiplier = 1.0 / (camera_far_plane - camera_near_plane)
        elif depth_format == DepthFormat.Millimeters:
            multiplier = 1000.0

        image_depth = read_buffer_image(
            image_depth_data, self.screen_width, self.screen_height, **kwargs
        ).reshape(self.screen_height, self.screen_width) * multiplier

        self.depth_frame = image_depth.astype(np.float32)

    def add_image_depth(self, image_depth_data, **kwargs):
        self.depth_frame = self._image_depth(image_depth_data, **kwargs)

    def add_third_party_image_depth(self, image_depth_data, **kwargs):
        self.third_party_depth_frames.append(self._image_depth(image_depth_data, **kwargs))

    def add_third_party_image_normals(self, normals_data):
        self.third_party_normals_frames.append(
            read_buffer_image(normals_data, self.screen_width, self.screen_height))

    def add_image_normals(self, image_normals_data):
        self.normals_frame = read_buffer_image(image_normals_data, self.screen_width, self.screen_height)

    def add_third_party_image_flows(self, flows_data):
        self.third_party_flows_frames.append(
            read_buffer_image(flows_data, self.screen_width, self.screen_height))

    def add_image_flows(self, image_flows_data):
        self.flows_frame = read_buffer_image(image_flows_data, self.screen_width, self.screen_height)

    def add_third_party_camera_image(self, third_party_image_data):
        self.third_party_camera_frames.append(
            read_buffer_image(third_party_image_data, self.screen_width, self.screen_height))

    def add_image(self, image_data, **kwargs):
        self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs)

    def add_image_ids(self, image_ids_data):
        self.instance_segmentation_frame = read_buffer_image(image_ids_data, self.screen_width, self.screen_height)
        self.process_colors_ids()

    def add_third_party_image_ids(self, image_ids_data):
        self.third_party_instance_segmentation_frames.append(
            read_buffer_image(image_ids_data, self.screen_width, self.screen_height))

    def add_image_classes(self, image_classes_data):
        self.class_segmentation_frame = read_buffer_image(image_classes_data, self.screen_width, self.screen_height)

    def add_third_party_image_classes(self, image_classes_data):
        self.third_party_class_segmentation_frames.append(
            read_buffer_image(image_classes_data, self.screen_width, self.screen_height))

    def cv2image(self):
        warnings.warn("Deprecated - please use event.cv2img")
        return self.cv2img

    @property
    def cv2img(self):
        # OpenCV expects BGR channel order
        return self.frame[..., ::-1]

    @property
    def pose(self):
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = round(agent_meta['rotation']['y'] * 1000)
        horizon = round(agent_meta['cameraHorizon'] * 1000)
        return (round(loc['x'] * 1000), round(loc['z'] * 1000), rotation, horizon)

    @property
    def pose_discrete(self):
        # XXX should have this as a parameter
        step_size = 0.25
        agent_meta = self.metadata['agent']
        loc = agent_meta['position']
        rotation = int(agent_meta['rotation']['y'] / 90.0)
        horizon = int(round(agent_meta['cameraHorizon']))
        return (int(loc['x'] / step_size), int(loc['z'] / step_size), rotation, horizon)

    def get_object(self, object_id):
        for obj in self.metadata['objects']:
            if obj['objectId'] == object_id:
                return obj
        return None
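
# Editor's sketch (not part of the upstream module): a hedged demonstration
# of the depth decoding in Event._image_depth.  DepthFormat is defined later
# in this file, which is fine because names are only resolved at call time.
def _example_depth_decoding():
    # Event only needs the screen dimensions for this code path.
    e = Event({'screenWidth': 2, 'screenHeight': 2})
    # A pixel of (255, 0, 0) decodes to roughly the far end of the
    # near/far range: 255/256 * (far - near).
    buf = bytes([255, 0, 0] * 4)
    depth = e._image_depth(
        buf,
        depth_format=DepthFormat.Meters,
        camera_near_plane=0.1,
        camera_far_plane=20.0)
    assert depth.shape == (2, 2)
    assert abs(depth[0, 0] - 255.0 / 256.0 * 19.9) < 1e-4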
class MultipartFormParser(object):

    @staticmethod
    def get_boundary(request_headers):
        for h, value in request_headers:
            if h == 'Content-Type':
                ctype, ct_opts = werkzeug.http.parse_options_header(value)
                boundary = ct_opts['boundary'].encode('ascii')
                return boundary
        return None

    def __init__(self, data, boundary):
        self.form = {}
        self.files = {}

        full_boundary = b'\r\n--' + boundary
        view = memoryview(data)
        i = data.find(full_boundary)
        while i >= 0:
            next_offset = data.find(full_boundary, i + len(full_boundary))
            if next_offset < 0:
                break

            headers_offset = i + len(full_boundary) + 2
            body_offset = data.find(b'\r\n\r\n', headers_offset)
            raw_headers = view[headers_offset:body_offset]
            body = view[body_offset + 4:next_offset]
            i = next_offset

            headers = {}
            for header in raw_headers.tobytes().decode('ascii').strip().split("\r\n"):
                k, v = header.split(':')
                headers[k.strip()] = v.strip()

            ctype, ct_opts = werkzeug.http.parse_options_header(headers['Content-Type'])
            cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition'])
            assert cdisp == 'form-data'

            if 'filename' in cd_opts:
                if cd_opts['name'] not in self.files:
                    self.files[cd_opts['name']] = []
                self.files[cd_opts['name']].append(body)
            else:
                if ctype == 'text/plain' and 'charset' in ct_opts:
                    body = body.tobytes().decode(ct_opts['charset'])

                if cd_opts['name'] not in self.form:
                    self.form[cd_opts['name']] = []
                self.form[cd_opts['name']].append(body)
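
# Editor's sketch (not part of the upstream module): a hedged example of the
# wire format MultipartFormParser expects.  Note the parser looks for
# '\r\n--boundary', so the payload below starts with a leading CRLF.
def _example_multipart_parser():
    headers = [('Content-Type', 'multipart/form-data; boundary=XBOUND')]
    assert MultipartFormParser.get_boundary(headers) == b'XBOUND'

    data = (b'\r\n--XBOUND\r\n'
            b'Content-Type: text/plain; charset=utf-8\r\n'
            b'Content-disposition: form-data; name="token"\r\n'
            b'\r\n'
            b'secret'
            b'\r\n--XBOUND--\r\n')
    form = MultipartFormParser(data, b'XBOUND')
    # text/plain parts are decoded with their charset and collected in lists
    assert form.form['token'] == ['secret']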
class DepthFormat(Enum):
    Meters = 0,
    Normalized = 1,
    Millimeters = 2


class Server(object):

    def __init__(
            self,
            request_queue,
            response_queue,
            host,
            port=0,
            threaded=False,
            depth_format=DepthFormat.Meters,
            add_depth_noise=False,
            width=300,
            height=300):

        app = Flask(__name__,
                    template_folder=os.path.realpath(
                        os.path.join(
                            os.path.dirname(os.path.abspath(__file__)), '..', 'templates')))

        self.image_buffer = None
        self.app = app
        self.client_token = None
        self.subscriptions = []
        self.app.config.update(PROPAGATE_EXCEPTIONS=False, JSONIFY_PRETTYPRINT_REGULAR=False)
        self.port = port
        self.last_rate_timestamp = time.time()
        self.frame_counter = 0
        self.debug_frames_per_interval = 50
        self.xwindow_id = None
        self.wsgi_server = werkzeug.serving.make_server(
            host, self.port, self.app, threaded=threaded, request_handler=ThorRequestHandler)

        # used to ensure that we are receiving frames for the action we sent
        self.sequence_id = 0
        self.last_event = None
        self.camera_near_plane = 0.1
        self.camera_far_plane = 20.0
        self.depth_format = depth_format
        self.add_depth_noise = add_depth_noise
        self.noise_indices = None

        if add_depth_noise:
            assert width == height, \
                "Noise supported with square dimension images only."
            self.noise_indices = generate_noise_indices(width)

        @app.route('/ping', methods=['get'])
        def ping():
            return 'pong'

        @app.route('/train', methods=['post'])
        def train():
            if request.headers['Content-Type'].split(';')[0] == 'multipart/form-data':
                form = MultipartFormParser(
                    request.get_data(), MultipartFormParser.get_boundary(request.headers))
                metadata = json.loads(form.form['metadata'][0])
                token = form.form['token'][0]
            else:
                form = request
                metadata = json.loads(form.form['metadata'])
                token = form.form['token']

            if self.client_token and token != self.client_token:
                abort(403)

            if self.frame_counter % self.debug_frames_per_interval == 0:
                now = time.time()
                # rate = self.debug_frames_per_interval / float(now - self.last_rate_timestamp)
                self.last_rate_timestamp = now
                # import datetime
                # print("%s %s/s" % (datetime.datetime.now().isoformat(), rate))

            if metadata['sequenceId'] != self.sequence_id:
                raise ValueError("Sequence id mismatch: %s vs %s" % (
                    metadata['sequenceId'], self.sequence_id))

            events = []
            for i, a in enumerate(metadata['agents']):
                e = Event(a)
                image_mapping = dict(
                    image=e.add_image,
                    image_depth=lambda x: e.add_image_depth(
                        x,
                        depth_format=self.depth_format,
                        camera_near_plane=self.camera_near_plane,
                        camera_far_plane=self.camera_far_plane,
                        add_noise=self.add_depth_noise,
                        noise_indices=self.noise_indices),
                    image_ids=e.add_image_ids,
                    image_classes=e.add_image_classes,
                    image_normals=e.add_image_normals,
                    image_flows=e.add_image_flows)

                for key in image_mapping.keys():
                    if key in form.files:
                        image_mapping[key](form.files[key][i])

                third_party_image_mapping = dict(
                    image=e.add_image,
                    image_thirdParty_depth=lambda x: e.add_third_party_image_depth(
                        x,
                        depth_format=self.depth_format,
                        camera_near_plane=self.camera_near_plane,
                        camera_far_plane=self.camera_far_plane),
                    image_thirdParty_image_ids=e.add_third_party_image_ids,
                    image_thirdParty_classes=e.add_third_party_image_classes,
                    image_thirdParty_normals=e.add_third_party_image_normals,
                    image_thirdParty_flows=e.add_third_party_image_flows)

                if a['thirdPartyCameras'] is not None:
                    for ti, t in enumerate(a['thirdPartyCameras']):
                        for key in third_party_image_mapping.keys():
                            if key in form.files:
                                third_party_image_mapping[key](form.files[key][ti])
                events.append(e)

            if len(events) > 1:
                self.last_event = event = MultiAgentEvent(metadata['activeAgentId'], events)
            else:
                self.last_event = event = events[0]

            for img in form.files.get('image-thirdParty-camera', []):
                self.last_event.add_third_party_camera_image(img)

            request_queue.put_nowait(event)
            self.frame_counter += 1

            next_action = queue_get(response_queue)
            if 'sequenceId' not in next_action:
                self.sequence_id += 1
                next_action['sequenceId'] = self.sequence_id
            else:
                self.sequence_id = next_action['sequenceId']

            resp = make_response(json.dumps(next_action, cls=NumpyAwareEncoder))

            return resp

    def start(self):
        self.wsgi_server.serve_forever()

    def set_init_params(self, init_params):
        self.camera_near_plane = init_params['cameraNearPlane']
        self.camera_far_plane = init_params['cameraFarPlane']
\"\"\" import", "(np.tile(unique_inverse[np.newaxis, :, :], (len(unique_ids), 1, 1)) == np.arange(len(unique_ids))[:, np.newaxis, np.newaxis])", "ctype == 'text/plain' and 'charset' in ct_opts: body = body.tobytes().decode(ct_opts['charset'])", "in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v = header.split(':') headers[k.strip()] = v.strip() ctype, ct_opts", "template_folder=os.path.realpath( os.path.join( os.path.dirname(os.path.abspath(__file__)), '..', 'templates'))) self.image_buffer = None self.app =", "rotation = round(agent_meta['rotation']['y'] * 1000) horizon = round(agent_meta['cameraHorizon'] * 1000)", "Empty import time import warnings from flask import Flask, request,", "the screenshot that Unity captures as well as the metadata", "unique, idx elif return_inverse: return unique, inv else: return unique", "90.0) horizon = int(round(agent_meta['cameraHorizon'])) return (int(loc['x'] / step_size), int(loc['z'] /", "body.tobytes().decode(ct_opts['charset']) if cd_opts['name'] not in self.form: self.form[cd_opts['name']] = [] self.form[cd_opts['name']].append(body)", "in third_party_image_mapping.keys(): if key in form.files: third_party_image_mapping[key](form.files[key][ti]) events.append(e) if len(events)", "is returned from a call to controller.step(). This class wraps", "logging import sys import os import os.path try: from queue", "self.class_detections2D[cls].append(bb) color_ind = np.argmin(np.sum(np.abs(unique_ids - color), axis=1)) if simObj: self.instance_detections2D[color_name]", "np.frombuffer(buf.tobytes(), dtype=dtype) if sys.version_info.major < 3 \\ else np.frombuffer(buf, dtype=dtype)", "image_normals_data): self.normals_frame = read_buffer_image(image_normals_data, self.screen_width, self.screen_height) def add_third_party_image_flows(self, flows_data): self.third_party_flows_frames.append(read_buffer_image(flows_data,", "= que.get(block=True, timeout=0.5) break except Empty: pass return res class", "self.add_depth_noise = add_depth_noise self.noise_indices = None if add_depth_noise: assert width", "@property def pose_discrete(self): # XXX should have this as a", "self.wsgi_server.serve_forever() def set_init_params(self, init_params): self.camera_near_plane = init_params['cameraNearPlane'] self.camera_far_plane = init_params['cameraFarPlane']", "loc = agent_meta['position'] rotation = int(agent_meta['rotation']['y'] / 90.0) horizon =", "for color_data in self.metadata['colors']: name = color_data['name'] c_key = tuple(color_data['color'])", "self.wfile.closed class ThorRequestHandler(werkzeug.serving.WSGIRequestHandler): def run_wsgi(self): old_wfile = self.wfile self.wfile =", "unique, inv else: return unique class Event(object): \"\"\" Object that", "flip_x: im = np.flip(im, axis=1) if flip_rb_colors: im = im[...,", "image_data, **kwargs): self.frame = read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs) def add_image_ids(self,", "[] self.events = [self] # Ensure we have a similar", "object_type): return [obj for obj in self.metadata['objects'] if obj['objectType'] ==", "= view[headers_offset: body_offset] body = view[body_offset + 4: next_offset] i", "import datetime # print(\"%s %s/s\" % (datetime.datetime.now().isoformat(), rate)) if metadata['sequenceId']", "len(self.instance_detections2D) > 0: for obj in self.metadata['objects']: obj['visibleBounds2D'] = (obj['visible']", "return im def unique_rows(arr, return_index=False, return_inverse=False): arr = np.ascontiguousarray(arr).copy() b", 
"camera_far_plane = kwargs.pop('camera_far_plane', 1) camera_near_plane = kwargs.pop('camera_near_plane', 0) if depth_format", "pose_discrete(self): # XXX should have this as a parameter step_size", "class MultipartFormParser(object): @staticmethod def get_boundary(request_headers): for h, value in request_headers:", "np.array(color_bounds['color']) color_name = self.color_to_object_id.get(tuple(int(cc) for cc in color), 'background') cls", "{} self.object_id_to_color = {} self.instance_detections2D = None self.instance_masks = {}", "= werkzeug.http.parse_options_header(headers['Content-disposition']) assert cdisp == 'form-data' if 'filename' in cd_opts:", "body = body.tobytes().decode(ct_opts['charset']) if cd_opts['name'] not in self.form: self.form[cd_opts['name']] =", "cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition']) assert cdisp == 'form-data' if 'filename'", "def start(self): self.wsgi_server.serve_forever() def set_init_params(self, init_params): self.camera_near_plane = init_params['cameraNearPlane'] self.camera_far_plane", "\"\"\" import json import logging import sys import os import", "raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v = header.split(':') headers[k.strip()] = v.strip() ctype, ct_opts =", "image_mapping[key](form.files[key][i]) third_party_image_mapping = dict( image=e.add_image, image_thirdParty_depth=lambda x: e.add_third_party_image_depth( x, depth_format=self.depth_format,", "unique, idx, inv elif return_index: return unique, idx elif return_inverse:", "\"Noise supported with square dimension images only.\" self.noise_indices = generate_noise_indices(width)", "x: e.add_image_depth( x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane, add_noise=self.add_depth_noise, noise_indices=self.noise_indices ), image_ids=e.add_image_ids,", "= arr.view(np.dtype((np.void, arr.dtype.itemsize * arr.shape[1]))) if return_inverse: _, idx, inv", "flush(self): self.wfile.write(b\"\".join(self.data)) self.wfile.flush() def close(self): return self.wfile.close() @property def closed(self):", "cd_opts: if cd_opts['name'] not in self.files: self.files[cd_opts['name']] = [] self.files[cd_opts['name']].append(body)", "return None def process_visible_bounds2D(self): if self.instance_detections2D and len(self.instance_detections2D) > 0:", "to allow quit def queue_get(que): res = None while True:", "event.cv2img\") return self.cv2img @property def cv2img(self): return self.frame[...,::-1] @property def", "image_flows=e.add_image_flows ) for key in image_mapping.keys(): if key in form.files:", "self.screen_height, **kwargs ).reshape(self.screen_height, self.screen_width) * multiplier self.depth_frame = image_depth.astype(np.float32) def", "read_buffer_image(image_data, self.screen_width, self.screen_height, **kwargs) def add_image_ids(self, image_ids_data): self.instance_segmentation_frame = read_buffer_image(image_ids_data,", "active_agent_id, events): self._active_event = events[active_agent_id] self.metadata = self._active_event.metadata self.screen_width =", "= np.flip(im, axis=1) if flip_rb_colors: im = im[..., ::-1] return", "werkzeug.http.parse_options_header(value) boundary = ct_opts['boundary'].encode('ascii') return boundary return None def __init__(self,", "'|' in cls: cls = cls.split('|')[0] simObj = True bb", "= dict( image=e.add_image, image_depth=lambda x: e.add_image_depth( x, depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, 
camera_far_plane=self.camera_far_plane,", "self.screen_width, self.screen_height)) def add_image_normals(self, image_normals_data): self.normals_frame = read_buffer_image(image_normals_data, self.screen_width, self.screen_height)", "self._active_event = events[active_agent_id] self.metadata = self._active_event.metadata self.screen_width = self._active_event.screen_width self.screen_height", "now # import datetime # print(\"%s %s/s\" % (datetime.datetime.now().isoformat(), rate))", "= [] self.third_party_normals_frames = [] self.third_party_flows_frames = [] self.events =", "image_depth_data, **kwargs): self.depth_frame = self._image_depth(image_depth_data, **kwargs) def add_third_party_image_depth(self, image_depth_data, **kwargs):", "dtype=dtype) if sys.version_info.major < 3 \\ else np.frombuffer(buf, dtype=dtype) im", "from ai2thor.util.depth import apply_real_noise, generate_noise_indices logging.getLogger('werkzeug').setLevel(logging.ERROR) werkzeug.serving.WSGIRequestHandler.protocol_version = 'HTTP/1.1' MAX_DEPTH", "+ 4: next_offset] i = next_offset headers = {} for", "add_depth_noise: assert width == height,\\ \"Noise supported with square dimension", "depth_format == DepthFormat.Millimeters: multiplier = 1000.0 image_depth = read_buffer_image( image_depth_data,", "* 1000) return (round(loc['x'] * 1000), round(loc['z'] * 1000), rotation,", "return (int(loc['x'] / step_size), int(loc['z'] / step_size), rotation, horizon) def", "= {} for header in raw_headers.tobytes().decode('ascii').strip().split(\"\\r\\n\"): k,v = header.split(':') headers[k.strip()]", "depth_format=self.depth_format, camera_near_plane=self.camera_near_plane, camera_far_plane=self.camera_far_plane ), image_thirdParty_image_ids=e.add_third_party_image_ids, image_thirdParty_classes=e.add_third_party_image_classes, image_thirdParty_normals=e.add_third_party_image_normals, image_thirdParty_flows=e.add_third_party_image_flows ) if", "multiplier *= 1000 image_depth_out *= multiplier / 256.0 depth_image_float =", "== height,\\ \"Noise supported with square dimension images only.\" self.noise_indices", "ct_opts = werkzeug.http.parse_options_header(headers['Content-Type']) cdisp, cd_opts = werkzeug.http.parse_options_header(headers['Content-disposition']) assert cdisp ==", "= 2 class Server(object): def __init__( self, request_queue, response_queue, host,", "= unique_masks[color_ind, ...] 
if cls not in self.class_masks: self.class_masks[cls] =", "<filename>ai2thor/server.py<gh_stars>1-10 # Copyright Allen Institute for Artificial Intelligence 2017 \"\"\"", "np.array(color_bounds['bounds']) bb[[1,3]] = self.metadata['screenHeight'] - bb[[3,1]] if not((bb[2] - bb[0])", "res class NumpyAwareEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, np.generic): return", "not in next_action: self.sequence_id += 1 next_action['sequenceId'] = self.sequence_id else:", "if next_offset < 0: break headers_offset = i + len(full_boundary)", "self.screen_height, **kwargs) def add_image_ids(self, image_ids_data): self.instance_segmentation_frame = read_buffer_image(image_ids_data, self.screen_width, self.screen_height)", "self._active_event.screen_width self.screen_height = self._active_event.screen_height self.events = events self.third_party_camera_frames = []", "self.last_rate_timestamp) self.last_rate_timestamp = now # import datetime # print(\"%s %s/s\"", "= 5000 # get with timeout to allow quit def", "threaded=False, depth_format=DepthFormat.Meters, add_depth_noise=False, width=300, height=300 ): app = Flask(__name__, template_folder=os.path.realpath(", "isinstance(obj, np.generic): return np.asscalar(obj) return super(NumpyAwareEncoder, self).default(obj) class BufferedIO(object): def" ]
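The depth pipeline above is easiest to see end-to-end with a synthetic buffer. A minimal sketch (not part of ai2thor; the near/far plane values are assumptions) of the base-256 packing that Event._image_depth inverts:

# Sketch only: Unity packs a normalized depth value base-256 into the
# R, G, B channels; the server unpacks it and rescales by (far - near)
# to obtain meters, exactly as _image_depth does above.
import numpy as np

near, far = 0.1, 20.0                                  # assumed camera planes
depth_norm = np.random.rand(4, 4).astype(np.float32)   # stand-in for Unity depth

# Pack into three uint8 channels, most significant first (R).
scaled = (depth_norm * (256 ** 3 - 1)).astype(np.int64)
rgb = np.stack([(scaled >> 16) & 0xFF, (scaled >> 8) & 0xFF, scaled & 0xFF],
               axis=-1).astype(np.uint8)

# Unpack with the same arithmetic as _image_depth, then rescale to meters.
out = rgb[:, :, 0] + rgb[:, :, 1] / np.float32(256) + rgb[:, :, 2] / np.float32(256 ** 2)
meters = out * (far - near) / 256.0

assert np.allclose(meters, depth_norm * (far - near), atol=1e-4)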
[ "author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points = { 'console_scripts': [", "install_requires=['mako', 'docopt'], entry_points = { 'console_scripts': [ 'mydocstring=mydocstring.docstring:main', ],}, package_data={'mydocstring':", "Google-style docstrings to plain-text, markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT',", "setuptools import setup setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for extracting and", "url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points = { 'console_scripts':", "extracting and converting Google-style docstrings to plain-text, markdown, and JSON.\"\"\",", "and converting Google-style docstrings to plain-text, markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring',", "setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for extracting and converting Google-style docstrings", "JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points = {", "version='0.2.7', description=\"\"\"A tool for extracting and converting Google-style docstrings to", "to plain-text, markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako',", "from setuptools import setup setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for extracting", "for extracting and converting Google-style docstrings to plain-text, markdown, and", "license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points = { 'console_scripts': [ 'mydocstring=mydocstring.docstring:main',", "tool for extracting and converting Google-style docstrings to plain-text, markdown,", "<filename>setup.py from setuptools import setup setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for", "'docopt'], entry_points = { 'console_scripts': [ 'mydocstring=mydocstring.docstring:main', ],}, package_data={'mydocstring': ['templates/google_docstring.md']},", "markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points", "entry_points = { 'console_scripts': [ 'mydocstring=mydocstring.docstring:main', ],}, package_data={'mydocstring': ['templates/google_docstring.md']}, zip_safe=False)", "description=\"\"\"A tool for extracting and converting Google-style docstrings to plain-text,", "docstrings to plain-text, markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'],", "import setup setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for extracting and converting", "packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points = { 'console_scripts': [ 'mydocstring=mydocstring.docstring:main', ],},", "and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'], entry_points =", "converting Google-style docstrings to plain-text, markdown, and JSON.\"\"\", url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\",", "plain-text, markdown, and JSON.\"\"\", 
url='http://github.com/ooreilly/mydocstring', author=\"<NAME>\", license='MIT', packages=['mydocstring'], install_requires=['mako', 'docopt'],", "setup setup(name='mydocstring', version='0.2.7', description=\"\"\"A tool for extracting and converting Google-style" ]
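The console_scripts entry point means pip generates a `mydocstring` executable that simply calls mydocstring.docstring:main. A sketch of the equivalent direct invocation, assuming the package is importable:

# Sketch only: entry point 'mydocstring=mydocstring.docstring:main' resolves
# to this call; docopt is in install_requires, so main() presumably reads
# its arguments from sys.argv.
from mydocstring.docstring import main

if __name__ == '__main__':
    main()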
[ "-*- coding: utf-8 -*- # # Copyright (c) 2020~2999 -", "import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name = 'bson' def", "kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs) except", "# ---------- # # ---------- import bson import struct from", "# ---------- import bson import struct from ..err import SerializeError", "return bson.loads(b, **kwargs) except Exception as e: raise SerializeError(e) def", "..err import SerializeError from ..abc import * from ..core import", "..core import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name = 'bson'", "import * from ..core import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer):", "= {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs) except Exception", "options: dict) -> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try:", "self.check_options(options) try: return bson.dumps(obj, **kwargs) except Exception as e: raise", "import struct from ..err import SerializeError from ..abc import *", "# -*- coding: utf-8 -*- # # Copyright (c) 2020~2999", "bytes, options: dict) -> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options)", "{} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs) except Exception as", "import bson import struct from ..err import SerializeError from ..abc", "* from ..core import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name", "-> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj,", "= 'bson' def loadb(self, b: bytes, options: dict) -> Any:", "kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs) except Exception as e:", "from ..abc import * from ..core import register_format @register_format('bson', '.bson')", "struct from ..err import SerializeError from ..abc import * from", "---------- # # ---------- import bson import struct from ..err", "dumpb(self, obj, options: dict) -> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options))", "class BsonSerializer(ISerializer): format_name = 'bson' def loadb(self, b: bytes, options:", "dict) -> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return", "# # ---------- import bson import struct from ..err import", "options: dict) -> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try:", "try: return bson.dumps(obj, **kwargs) except Exception as e: raise SerializeError(e)", "kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs) except", "<<EMAIL>> # ---------- # # ---------- import bson import struct", "def dumpb(self, obj, options: dict) -> bytes: kwargs = {}", "dict) -> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return", "= {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs) except Exception", "Cologler <<EMAIL>> # ---------- # # ---------- import bson import", "bson import struct 
from ..err import SerializeError from ..abc import", "(c) 2020~2999 - Cologler <<EMAIL>> # ---------- # # ----------", "loadb(self, b: bytes, options: dict) -> Any: kwargs = {}", "bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs)", "- Cologler <<EMAIL>> # ---------- # # ---------- import bson", "def loadb(self, b: bytes, options: dict) -> Any: kwargs =", "import SerializeError from ..abc import * from ..core import register_format", "# Copyright (c) 2020~2999 - Cologler <<EMAIL>> # ---------- #", "SerializeError(e) def dumpb(self, obj, options: dict) -> bytes: kwargs =", "register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name = 'bson' def loadb(self,", "utf-8 -*- # # Copyright (c) 2020~2999 - Cologler <<EMAIL>>", "as e: raise SerializeError(e) def dumpb(self, obj, options: dict) ->", "from ..core import register_format @register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name =", "..abc import * from ..core import register_format @register_format('bson', '.bson') class", "SerializeError from ..abc import * from ..core import register_format @register_format('bson',", "try: return bson.loads(b, **kwargs) except Exception as e: raise SerializeError(e)", "obj, options: dict) -> bytes: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options)", "bson.loads(b, **kwargs) except Exception as e: raise SerializeError(e) def dumpb(self,", "b: bytes, options: dict) -> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options))", "**kwargs) except Exception as e: raise SerializeError(e) def dumpb(self, obj,", "# # Copyright (c) 2020~2999 - Cologler <<EMAIL>> # ----------", "Exception as e: raise SerializeError(e) def dumpb(self, obj, options: dict)", "format_name = 'bson' def loadb(self, b: bytes, options: dict) ->", "except Exception as e: raise SerializeError(e) def dumpb(self, obj, options:", "-> Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b,", "coding: utf-8 -*- # # Copyright (c) 2020~2999 - Cologler", "e: raise SerializeError(e) def dumpb(self, obj, options: dict) -> bytes:", "@register_format('bson', '.bson') class BsonSerializer(ISerializer): format_name = 'bson' def loadb(self, b:", "from ..err import SerializeError from ..abc import * from ..core", "2020~2999 - Cologler <<EMAIL>> # ---------- # # ---------- import", "Copyright (c) 2020~2999 - Cologler <<EMAIL>> # ---------- # #", "BsonSerializer(ISerializer): format_name = 'bson' def loadb(self, b: bytes, options: dict)", "self.check_options(options) try: return bson.loads(b, **kwargs) except Exception as e: raise", "'.bson') class BsonSerializer(ISerializer): format_name = 'bson' def loadb(self, b: bytes,", "Any: kwargs = {} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs)", "raise SerializeError(e) def dumpb(self, obj, options: dict) -> bytes: kwargs", "{} kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.loads(b, **kwargs) except Exception as", "---------- import bson import struct from ..err import SerializeError from", "kwargs.update(Options.pop_origin_kwargs(options)) self.check_options(options) try: return bson.dumps(obj, **kwargs) except Exception as e:", "-*- # # Copyright (c) 2020~2999 - Cologler <<EMAIL>> #", "'bson' def loadb(self, b: bytes, options: dict) -> Any: 
kwargs" ]
[ "assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f') == 7 assert config.get('t.e.y.g')", "test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( \"\"\" app.heap_size = 128 app.java_opts =", "config.get('t.j') == [1, 2, 3] assert config.get('t.u') == '192.168.1.3/32' assert", "config2 = ConfigFactory.parse_string( \"\"\" list = ${list} [ 4, 5,", "merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) b = merged.get(\"b\") assert len(b)", "5, 'b': 5, 'c': 5} == config1 def test_optional_substitution(self): config", "mon'), ('a: 1 years', relativedelta(years=1)), ('a: 1years', relativedelta(years=1)), ('a: 2", "[perl] host_modules = aa ${common_modules} bb \"\"\" ) with pytest.raises(ConfigWrongTypeException):", "1 c : ${a} { d : [ ${b} ]", "== 'str' assert config1.get('d') == 'str' assert config1.get('f') == 'str", "relativedelta(months=3)), ('a: 3 mon', '3 mon'), ('a: 1 years', relativedelta(years=1)),", "config3 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size = 6", "ConfigFactory.parse_string( \"\"\" string_from_env = ${STRING_VAR} \"\"\") assert config == {", "here for regression testing https://github.com/chimpler/pyhocon/issues/44 config2 = config1.with_fallback(u'samples/aws.conf') assert config2", "import timedelta as period class TestConfigParser(object): def test_parse_simple_value(self): config =", "set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self): ''' Example from HOCON spec", "== 121.22E-3 def test_assign_strings_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a =", "config.get(\"x.z\") == {'x': [3, 4], 'y': [5, 6]} def test_self_ref_substitiotion_dict_in_array(self):", "\"\"\" ) assert config['a'] == [ {'a': 1, 'b': 2},", "2], 'd': {'foo': 'bar'}} def test_concat_multi_line_string(self): config = ConfigFactory.parse_string( \"\"\"", "period(weeks=12)), ('a: 10 days', period(days=10)), ('a: 11 day', period(days=11)), ('a:", "[ [5, 6, 1, 2], [3, 4, 5, 6], [1,", "= ConfigFactory.parse_string( \"\"\" test = line1 \\ line2 test2 =", "+= def \"\"\" ) assert config.get(\"x\") == \" def\" def", "== {'foo': 43, 'baz': 43} assert set(config.keys()) == set(['bar']) def", "'5' @pytest.mark.parametrize('data_set', [ ('a: 1 minutes', period(minutes=1)), ('a: 1minutes', period(minutes=1)),", "{ \"d\": { \"g\": { \"h\": { \"j\": { \"u\":", "1, 'b': 2} assert config['b'] == {'c': 3, 'd': 4}", "config['foo'] == 5.0 def test_list_substitutions(self): config = ConfigFactory.parse_string( \"\"\" common_modules", "42 assert config.get_int('o3.foo.a') == 1 assert config.get_int('o3.foo.c') == 4 def", "// comment 0 g = 6 test # comment 0", "w', period(weeks=12)), ('a: 10 days', period(days=10)), ('a: 11 day', period(days=11)),", "test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} \"\"\" )", "== -121.22 assert config['c'] == .54 assert config['d'] == -.54", "'php', 'python', 'perl'] assert config4.get('full_modules') == ['java', 'php', 'python', 'perl',", "ConfigFactory.parse_string('common_modules {} {perl: 1}') with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a =", "[5, 6]} def test_self_ref_substitiotion_dict_in_array(self): config = ConfigFactory.parse_string( \"\"\" x =", "config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException): 
config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue #102", "3, 'c': 4} ] def test_list_of_lists(self): config = ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string( \"\"\" // bar.a should end up as 4 bar", "[5, 6] x = [-3, -2] ${x} \"\"\" ) assert", "${?CUSTOM_MSG} \"\"\") assert config == { 'num': 3, 'retries_msg': 'You", "b = -121.22 c = .54 d = -.54 \"\"\"", "{ c : ${bar.b}, d : 2 } foo.d =", "1 } bar.b = 3 // foo.c should end up", "[ [1, 2] [3, 4] ] \"\"\" ) assert config['a']", "\"\"\" ) assert config['\"a.b.c.d\"'] == 3 assert config['t.d.c'] == 5", "} \"\"\" expected = { 'plain-backslash': '\\\\', 'tab': '\\t', 'no-tab':", "${a} { } \"\"\", resolve=False ) merged = ConfigTree.merge_configs(config1, config2)", "('a: 1months', relativedelta(months=1)), ('a: 2 month', relativedelta(months=2)), ('a: 3 mo',", "include the other config file from 'my_module' config = ConfigFactory.parse_string(", "[1, 2] config3 = ConfigFactory.parse_string( \"\"\" a: [ include url(\"file://{tmp_file}\")", "merged.get(\"b\") assert len(b) == 2 assert b[0] == {'v2': 2}", "@pytest.mark.parametrize('data_set', [ ('a: 1 minutes', period(minutes=1)), ('a: 1minutes', period(minutes=1)), ('a:", "fdin.write(\"{c: 3}\") # add the temp dir to sys.path so", "8 def test_concat_dict(self): config = ConfigFactory.parse_string( \"\"\" a: {b: 1}", "= ConfigFactory.parse_string( \"\"\" e : ${a} { } \"\"\", resolve=False", "name = \"second domain\" } } \"\"\" config = ConfigFactory.parse_string(input_string)", "} data-center-east = {name = \"east\"} ${data-center-generic} \"\"\" ) assert", "y', relativedelta(years=3)), ('a: 3y', relativedelta(years=3)), ]) def test_parse_string_with_duration_optional_units(data_set): config =", "3m', period(minutes=3)), ('a: 3 min', '3 min'), ('a: 4 seconds',", "113 micro', period(microseconds=113)), ('a: 114 us', period(microseconds=114)), ('a: 110 milliseconds',", "{ 'int_from_env': '5' } assert config.get_int('int_from_env') == 5 def test_unicode_dict_key(self):", "c = { e: 5, f: 6 } \"\"\" )", "2, 3, 4, 5, 6] assert config.get_list('a') == [1, 2,", "b = 5 b c = b 7 \"\"\" )", "\"\"\" include-database=true \"\"\") assert config == { 'include-database': True }", "assert config2.get('a.b.c') == 'str' assert config2.get('d') == 'test str' assert", "'t' in config_tree['foo'] and config_tree['foo']['s'] == 3 def test_attr_syntax(self): config", "b = merged.get(\"b\") assert len(b) == 1 assert b[0] ==", "h.d: 4 } g.h.k: f d } h.i.m = 7", "test_substitution_nested_override(self): config = ConfigFactory.parse_string( \"\"\" database { name = peopledb", "= 5 } } a.b { c = 7 d", "assert config2.get('host_modules') == ['java', 'php', 'python'] config3 = ConfigFactory.parse_string( \"\"\"", "} e: ${b} \"\"\", resolve=True) assert config == { 'a':", "be an int but on python 2 long with be", "config = ConfigFactory.parse_string( \"\"\" e : ${a} { } \"\"\",", "== '<PASSWORD>' def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( \"\"\" foo: 42", "2} assert config['b'] == {'c': 3, 'd': 4} assert config['c']", "config.get('data-center-east.cluster-size') == 6 assert config.get('data-center-east.name') == 'east' config2 = ConfigFactory.parse_string(", "'perl', 'b': 'java', 'c': 'python'} def test_parse_URL_from_samples(self): config = ConfigFactory.parse_URL(\"file:samples/aws.conf\")", "test_self_append_array(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2] x +=", "\"\"\" common_modules = [perl] 
host_modules = 55 ${common_modules} \"\"\" )", "${bar.b}, d : 2 } foo.d = 4 \"\"\" )", "config2.with_fallback(config1) assert config2.get(\"dict\") == {'x': 1, 'y': 2} def test_fallback_self_ref_substitutions_concat_string(self):", "} } \"\"\") expected_result = { \"a\": { \"d\": {", "== 'a' assert config['b'] == 'b' assert config['c'] == 'c'", "{ name = peopledb pass = <PASSWORD> name = ${?NOT_EXISTS}", "long = 12321321837612378126213217321 negative = -15 \"\"\" ) # on", ") assert config.get(\"x\") == {'a': 1, 'b': 2, 'c': 3,", "pb: 5 } \"\"\") assert 5 == config.b.pb def test_escape_quote(self):", "resolve=False ) config2 = config2.with_fallback(config1) assert config2.get(\"list\") == [1, 2,", "3, # comment 9 ] } # comment 10 //", "1 b: ${c} { pa: [${a}] pb: ${b.pa} } c:", "\"\"\" common_modules = ${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "= ${x.y} \"\"\" ) def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( \"\"\"", "config.get('dog.mutt.hates.garfield.say') == 'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_animals.conf\") assert config.get('animals.garfield.say')", ") # on python 3 long will be an int", "test_self_merge_ref_substitutions_object3(self): config1 = ConfigFactory.parse_string( \"\"\" b1 : { v1: 1", "'b' not in config assert config['d'] == 4 assert config['e']", "} @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config = ConfigFactory.parse_string( \"\"\" STRING_VAR", "= ConfigFactory.parse_string( \"\"\" x = {a: 1, b: 2} x", "2 year', relativedelta(years=2)), ('a: 3 y', relativedelta(years=3)), ('a: 3y', relativedelta(years=3)),", "assert expected == config2 def test_include_missing_required_file(self): with pytest.raises(IOError): ConfigFactory.parse_string( \"\"\"", "== 121.22E3423432 assert config['neg_long1'] == 121.22E-1 assert config['long2'] == 121.22E3423432", "2}') fdin.flush() config1 = ConfigFactory.parse_string( \"\"\" a: {{ include \"{tmp_file}\"", "] } # comment 10 // comment 11 // comment", "man, d=woof, a-b-c-d=test, a b c d=test2, \"a b c", "b: 1 c: 2 } \"\"\" ) config2 = ConfigFactory.parse_string(", "{ 'num': 3, 'retries_msg': 'You have 3 retries' } def", "\"\"\" string_from_env = ${STRING_VAR} \"\"\") assert config == { 'string_from_env':", "period(hours=8)), ('a: 9 h', period(hours=9)), ('a: 10 weeks', period(weeks=10)), ('a:", "database { name = ${?user} pass = ${?pass} } \"\"\")", "test_list_of_dicts_with_merge(self): config = ConfigFactory.parse_string( \"\"\" b = {f: 4} a:", "\"bar\"]') assert config_tree == { 'a': ['foo\"', \"bar\"] } def", "6] assert config.get_list('a') == [1, 2, 3, 4, 5, 6]", "data-center-east = ${data-center-generic} {name = \"east\"} \"\"\" ) assert config.get('data-center-east.cluster-size')", "in config.get('t.e.y.i').split('\\n')] == ['', '\"first line\"', '\"second\" line', ''] assert", "3} def test_fallback_self_ref_substitutions_merge(self): config1 = ConfigFactory.parse_string( \"\"\" dict = {", "} data-center-east = ${data-center-generic} data-center-east = { name = \"east\"", "be equivalent to x = abc x = ${?x} def", "comment 12 \"\"\" ) assert config.get('c') == 'test' assert config.get('g')", "long assert config['short'] == 12 assert isinstance(config['short'], int) assert config['long']", "d['banana'] = 3 d['apple'] = 4 d['pear'] = 1 d['orange']", "b: 2}') fdin.flush() config1 = 
ConfigFactory.parse_string( \"\"\" a: {{ include", "} foo : ${foo.a} foo : { a : 2", "114 ms', timedelta(milliseconds=114)), ('a: 110 nanoseconds', period(microseconds=0)), ('a: 11000 nanoseconds',", "2, 3] \"\"\" ) (one, two, three) = config.get(\"x\") assert", "millisecond', timedelta(milliseconds=111)), ('a: 112 millis', timedelta(milliseconds=112)), ('a: 113 milli', timedelta(milliseconds=113)),", "} \"\"\" ) # use unicode path here for regression", "2 } \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" a.b =", "test_concat_multi_line_string(self): config = ConfigFactory.parse_string( \"\"\" common_modules = perl \\ java", "{perl: 1} \"\"\") def test_invalid_dict(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a", "u = 192.168.1.3/32 g = null } \"\"\" ) assert", ": 1 b : 2 } } o2 = {", "b = # test # test2 { c: 3, d:", "\"\"\") assert config['c'] == 'foo 1' assert config['d'] == '1", "x in config['common_modules'].split() if x.strip(' ') != ''] == ['perl',", "== 'meow' def test_include_glob_list_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_bars.conf\") bars = config.get_list('bars')", "{ 'a': 5, 'b': '55', 'c': '5 5' } def", "1 \"\"\") assert 'b' not in config assert config['d'] ==", "1120000 nanos', period(microseconds=1120)), ('a: 1130000 nano', period(microseconds=1130)), ('a: 1140000 ns',", "} a : { x : 42 } } \"\"\"", "= ConfigFactory.parse_string( \"\"\" /abc/cde1: abc \"/abc/cde2\": \"cde\" /abc/cde3: \"fgh\" \"\"\")", "5 config2 = ConfigFactory.parse_string( \"\"\" { database { host =", ": { foo : 43 } \"\"\" ) assert config.get(\"bar\")", "= ConfigFactory.parse_string( \"\"\" b = {f: 4} a: [ ${b}", "= ConfigFactory.parse_string( \"\"\" short = 12.12321 long1 = 121.22E3423432 neg_long1", "config = ConfigFactory.parse_string( \"\"\" a: 1 b: { pb: 5", "config.get('a.c') == 2 assert config.get('b.c') == 5 assert config.get('b.d') ==", "bar \", 'trailing_ws': \"foo bar \", 'trailing_ws_with_comment': \"foo bar \"", "4, 'pear': 1, 'orange': 2, } config = ConfigFactory.from_dict(d) assert", "121.22E3423432 assert config['neg_long1'] == 121.22E-1 assert config['long2'] == 121.22E3423432 assert", "1, 2, 3, 4, 5, 6] def test_self_append_array(self): config =", "\"\"\" // bar.a should end up as 4 bar :", "== [1, 2, 3] assert config.get('t.u') == '192.168.1.3/32' assert config.get_int('t.g')", "\"-Xmx4g\" } \"\"\" ) assert config3.get('data-center-east.cluster-size') == 9 assert config3.get('data-center-east.name')", "resolve=False) \\ .with_fallback(config3) assert {'a': 5, 'b': 5, 'c': 5}", "assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from HOCON", "{a: 1, b: 2} x = ${x} {c: 3} x", "{'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1,", "1, 'b': 2} def test_self_append_nonexistent_object(self): config = ConfigFactory.parse_string( \"\"\" x", "== { 'a': 5, 'b': '55', 'c': '5 5' }", "= ConfigFactory.parse_string( \"\"\" a = foo \"bar\" dummy \"\"\") assert", "== '\\n\\\\\\\\\\n' assert config['with-newline-escape-sequence'] == '\\n\\\\n\\n' assert config['with-escaped-newline-escape-sequence'] == '\\n\\\\\\\\n\\n'", "== {'x': 1, 'y': 2} def test_fallback_self_ref_substitutions_concat_string(self): config1 = ConfigFactory.parse_string(", "121.22E-1 long2 = 121.22e3423432 neg_long2 = 121.22e-3 \"\"\" ) #", "= <PASSWORD> } database { name = ${?user} pass =", "} data-center-east = { name = \"east\" } data-center-east =", "raise an exception with 
pytest.raises(ConfigMissingException): config.get('b') def test_parse_null(self): config =", "'b': {'pa': [1], 'pb': [1]}, 'c': {}, 'd': {'pc': [1]},", "equivalent to x = ${?x} def ''' config = ConfigFactory.parse_string(", "4] [ 5, 6 ] \"\"\" ) assert config.get('a') ==", "up as 3 foo : { c : ${bar.b}, d", "name: foo \"\"\" ) config5 = ConfigFactory.parse_string( u\"\"\" longName: \"long", "test_list_of_dicts(self): config = ConfigFactory.parse_string( \"\"\" a: [ {a: 1, b:", "expected = { 'a': {'d': 6} } assert expected ==", "assert expected == config_tree def test_merge_overriden(self): # Adress issue #110", "ConfigFactory.parse_string( \"\"\" x = [1,2] x = ${x} [3,4] x", "# -*- encoding: utf-8 -*- import json import os import", "\"a\": { \"d\": { \"g\": { \"h\": { \"j\": {", "config.get_int('o1.foo.c', default=42) == 42 assert config.get_int('o3.foo.a') == 1 assert config.get_int('o3.foo.c')", "== {'a': 4, 'b': 3} assert config.get(\"foo\") == {'c': 3,", "5 } # comment 6 t = [1, # comment", "[ 5, 6 ] \"\"\" ) assert config.get('a') == [1,", "== 'right' def test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings are in the", "'\\\\n', 'cr': '\\r', 'no-cr': '\\\\r', 'windows': 'c:\\\\temp', } config =", "ignore_errors=True) def test_include_dict(self): expected_res = { 'a': 1, 'b': 2,", "period(minutes=1)), ('a: 1minutes', period(minutes=1)), ('a: 2 minute', period(minutes=2)), ('a: 3", "3 4 ] \"\"\" ) def test_resolve_package_path(self): path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\")", ") assert config2.get('a.b.c') == 5 assert config2.get('d') == 'test 5'", "\"\"\" foo: 42 foo: ${?a} \"\"\", resolve=False) source = ConfigFactory.parse_string(", "default=42) == 42 assert config.get_int('o3.foo.a') == 1 assert config.get_int('o3.foo.c') ==", "\"east\"} ${data-center-generic} data-center-east-prod = ${data-center-east} {tmpDir=/tmp} \"\"\" ) assert config4.get('data-center-east.cluster-size')", "}} \"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3 = ConfigFactory.parse_string(", "in names assert 'milk' in types def test_list_of_dicts(self): config =", "assert config.get_string(\"A.field3\") == \"123\" assert config.get_string(\"Test.field1\") == \"1\" assert config.get_string(\"Test.field2\")", "= null b = [null] \"\"\" ) assert config.get('a') is", "assert config['a'] == [ [1, 2], [3, 4] ] def", "retries_msg = ${?CUSTOM_MSG} \"\"\") assert config == { 'num': 3,", "relativedelta as period except Exception: from datetime import timedelta as", "assert three == 3 def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string( \"\"\"", ") assert config2['database']['host'] == 'other.host.net' assert config2['database']['port'] == 433 assert", "== expected_res config2 = ConfigFactory.parse_string( \"\"\" a: {{ c: 3", "source = ConfigFactory.parse_string( \"\"\" b: 14 \"\"\") config = unresolved.with_fallback(source)", "} \"\"\" ) assert config1.get('a.b.c') == 5 assert config1.get('d') ==", "v in config.get('t.e.y.i').split('\\n')] == ['', '\"first line\"', '\"second\" line', '']", "assert config1.get('a.b.c') == 'str' assert config1.get('d') == 'str' assert config1.get('f')", "config = ConfigFactory.parse_string( \"\"\" database { host = localhost port", "0 g = 6 test # comment 0 # comment", "\"bar \"{ws} trailing_ws_with_comment = \"foo\" \"bar \"{ws}// comment \"\"\".format(ws=' '))", "{ VAR : ${var} } substrate-suite: { VAR : \"right\"", "ParseException, ParseSyntaxException import mock import pytest 
from pyhocon import (ConfigFactory,", "a = ${?test} a = 5 \"\"\" ) assert config1['a']", ": ${var} } substrate-suite: { VAR : \"right\" } b1", "assert config3.get('data-center-east.cluster-size') == 9 assert config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts')", ": 2 } } o2 = { foo : {", "e : ${a} { } \"\"\", resolve=False ) merged =", "${x} x = ${x} \"\"\" ) def test_self_ref_substitution_dict_merge(self): ''' Example", "config['a'] == [1, 2] assert config['b'] == [3, 4] assert", "== d def test_from_dict_with_nested_dict(self): d = OrderedDict() d['banana'] = 3", "\"\"\" \"a.b.c.d\": 3 t { \"d\": { \"c\": 5 }", "c = 5 \"d\" = true e.y = { f:", "\"\"\" ) # use unicode path here for regression testing", "\"\"\" common_modules = [perl] \\ [java] \\ [python] \"\"\" )", "\"\"\" common_modules = [perl] host_modules = aa ${common_modules} bb \"\"\"", "config = ConfigFactory.parse_string( \"\"\" x = {x: [3,4]} x =", "= 5 } } d = ${a.b.c} } \"\"\" )", ".54 assert config['d'] == -.54 def test_sci_real(self): \"\"\" Test scientific", "config2['database']['port'] == 433 assert config2['database']['url'] == 'other.host.net:433' def test_fallback_substitutions_overwrite(self): config1", "} b1 = [] var = \"wrong\" compilerCommon : ${common}", "a { a: 1, b: 2, } b # test", ") config2 = ConfigFactory.parse_string( \"\"\" b2 : ${x} {v2: 3}", "== [1, 2] def test_include_missing_file(self): config1 = ConfigFactory.parse_string( \"\"\" a:", "config = ConfigFactory.parse_string( \"\"\" a: 1 b: 2 include package(\"my_module:my.conf\")", "www.example-ö.com { us { name = \"second domain\" } }", "import os import shutil import tempfile from collections import OrderedDict", "'`', '^', '?', '!', '@', '*', '&']) def test_fail_parse_forbidden_characters(self, forbidden_char):", "with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{ x : 10, y :", "\"str \" } } d = test ${a.b.c} f =", "{ b: { c = 5 } } a.b {", "host = \"localhost\" port = 8000 url = ${database.host}\":\"${database.port} }", "== [1, 2, 3] def test_quoted_key_with_dots(self): config = ConfigFactory.parse_string( \"\"\"", "{ b: { c = 5 } } d =", "} assert expected == config_tree def test_merge_overriden(self): # Adress issue", "['a', 1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config = ConfigFactory.parse_string(\"[1, 2,", "= 45 b = ${?c} d = ${?c} 4 e", "\"\"\" ) assert config['a'] == 'a' assert config['b'] == 'b'", "2] [3, 4] ] \"\"\" ) assert config['a'] == [", "= [-Xm16g, ${application.foo}] ${application.default-jvm-opts} \"\"\") assert config[\"application.large-jvm-opts\"] == [ '-XX:+UseParNewGC',", "== [1, 2] config3 = ConfigFactory.parse_string( \"\"\" a: [ include", "ConfigFactory.parse_string( \"\"\" a = [ \"a\", \"b\", ] b =", "4} a: [ ${b} {a: 1, b: 2}, {a: 3,", "resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") ==", "test_assign_number_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = 4 b =", "config = ConfigFactory.parse_string('foo = \"\"\"5\"\"\"') assert config['foo'] == '5' def", "config = ConfigFactory.parse_string( \"\"\" a: 1 b: ${c} { pa:", "6 def test_assign_int(self): config = ConfigFactory.parse_string( \"\"\" short = 12", "= ${a.b} a.c = [1,2] a.c = ${a.c} a.d =", "} def test_assign_next_line(self): config = ConfigFactory.parse_string( \"\"\" a = //", "a: { # comment 4 b: test, # comment 5", "1, 'v2': 3} def 
test_self_merge_ref_substitutions_object3(self): config1 = ConfigFactory.parse_string( \"\"\" b1", "\\\"\\\"\\\" with-escaped-newline-escape-sequence: \\\"\\\"\\\" \\\\\\\\n \\\"\\\"\\\" \"\"\" ) assert config['with-escaped-backslash'] ==", "== { 'test_no_quotes': 'abc\\n\\n', 'test_quotes': 'abc\\n\\n' } def test_multi_line_escape(self): config", "'a': 'foo bar' } def test_quoted_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\"", "4 } with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a: 1, b: 2}')", "x = abc x += def \"\"\" ) assert config.get(\"x\")", "]) def test_parse_string_with_duration(self, data_set): config = ConfigFactory.parse_string(data_set[0]) assert config['a'] ==", "+ fdin.name + \"\"\"\" } a : { x :", "config == d def test_object_concat(self): config = ConfigFactory.parse_string( \"\"\"o1 =", "${main_language}] \"\"\" ) assert config.get('languages') == ['java', 'php'] def test_substitution_list_with_append(self):", "with pytest.raises(ConfigException): config.get_bool(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_config(u'www.example-ö.com.us.name') with", "assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def", "] def test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string( \"\"\" b = [5,", "{r: 1, s: 2} baz: {s: 3, t: 4} \"\"\")", "\"mist\" } \"\"\" ) # use unicode path here for", "test_self_merge_ref_substitutions_object(self): config1 = ConfigFactory.parse_string( \"\"\" a : { } b", "4} { c: 5 } \"\"\" ) assert config.get('a.b') ==", "7 assert config.get('a.b.d') == 8 def test_concat_dict(self): config = ConfigFactory.parse_string(", "\"\"\" ) assert config['b'] == period(weeks=10) def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self): config =", "test_parse_forbidden_characters_quoted(self, forbidden_char): value = \"hey man{}\".format(forbidden_char) config = ConfigFactory.parse_string('a: \"{}\"'.format(value))", "\"\"\", resolve=False ) config2 = ConfigFactory.parse_string( \"\"\" b2 : ${x}", "config.get('data-center-east.name') == 'east' config2 = ConfigFactory.parse_string( \"\"\" data-center-generic = {", "assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3} finally:", "def test_include_package_file(self, monkeypatch): temp_dir = tempfile.mkdtemp() try: module_dir = os.path.join(temp_dir,", "= ${Test.field1}\"2\" field3 = ${Test.field2}\"3\" } \"\"\" ) assert config.get_string(\"A.field1\")", "def test_concat_multi_line_string(self): config = ConfigFactory.parse_string( \"\"\" common_modules = perl \\", "config.get('b')[0] is None def test_parse_override(self): config = ConfigFactory.parse_string( \"\"\" {", "9 h', period(hours=9)), ('a: 10 weeks', period(weeks=10)), ('a: 11 week',", "[ ('a: 1 months', relativedelta(months=1)), ('a: 1months', relativedelta(months=1)), ('a: 2", "and config_tree['foo']['s'] == 3 def test_attr_syntax(self): config = ConfigFactory.parse_string( \"\"\"", "= ConfigFactory.parse_URL(\"https://nosuchurl\") assert config == [] def test_include_dict_from_samples(self): config =", "ConfigFactory.parse_string( \"\"\" a = { a: 1, b: 2, }", "== \"12\" assert config.get_string(\"Test.field3\") == \"123\" def test_one_line_quote_escape(self): config =", "5.0 assert config.get('t.e.y.f') == 7 assert config.get('t.e.y.g') == 'hey dude!'", "config.b.pb def 
test_escape_quote(self): config = ConfigFactory.parse_string( \"\"\" quoted: \"abc\\\\\"test\" unquoted:", ") with pytest.raises(ConfigException): config.as_plain_ordered_dict() def test_quoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\"", "b=\"abc\", c=the man, d=woof, a-b-c-d=test, a b c d=test2, \"a", ": 42, baz : ${bar.foo} } \"\"\" ) assert config.get(\"bar\")", "test_include_required_file(self): config = ConfigFactory.parse_string( \"\"\" a { include required(\"samples/animals.d/cat.conf\") t", "assert config3['a'] == { 'b': 1, 'c': 2, 'd': 3", "[7, 8] ] \"\"\" ) assert config['a'] == [ [5,", "4, 5, 6] def test_fallback_self_ref_substitutions_append_plus_equals(self): config1 = ConfigFactory.parse_string( \"\"\" list", "= {x: [3,4]} x = [${x}, 2, 3] \"\"\" )", "\"f\" ] \"\"\" ) assert config['a'] == ['a', 'b'] assert", "'python'] assert config4.get('host_modules') == ['java', 'php', 'python', 'perl'] assert config4.get('full_modules')", "assert config[\"application.large-jvm-opts\"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config[\"application.large-jvm-opts2\"]", "== 'hey dude!' assert config.get('t.e.y.h') == 'hey man' assert [v.strip()", "assert config2['a'] == [1, 2] config3 = ConfigFactory.parse_string( \"\"\" a:", "\"\"\" source = r\"\"\" { \"plain-backslash\": \"\\\\\", \"tab\": \"\\t\", \"no-tab\":", "} \"\"\" ) assert config3.get('a.b.c') == 'str' assert config3.get('d') ==", ") assert config.get('a.b.c') == 7 assert config.get('d') == 'test 7", "config['x'] == 42 assert config['y'] == 42 @pytest.mark.xfail def test_include_substitution2(self):", "us { name = \"first domain\" } } www.example-ö.com {", "assert config.get_int('t.g') is None assert config.get_float('t.g') is None assert config.get_string('t.g')", "== ['java', 'php', 'python', 'perl', 'c', 'go'] def test_list_element_substitution(self): config", ") assert config.get('a.b') == 1 assert config.get('a.c') == 2 assert", "bar \" } def test_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" a", "a = // abc abc c = 5 \"\"\") assert", ") assert config1.get('a.b.c') == 5 assert config1.get('d') == 5 config2", "with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = \"4\" [5]') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a =", "} data-center-east = ${data-center-generic} \"\"\" ) assert config6['data-center-east'] == {", "a.c = [1,2] a.c = ${a.c} a.d = {foo: bar}", "config2 = ConfigFactory.parse_string( \"\"\" b2 : ${x} {v2: 3} b", "Test { field1 = 1 field2 = ${Test.field1}\"2\" field3 =", "common_modules = [perl] host_modules = aa ${common_modules} bb \"\"\" )", "== 'east' config3 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size", "== config_tree.pop('a.c', 5) expected = { 'a': {'d': 6} }", "{a: 3} ${b} {c: 6}, ] \"\"\" ) assert config['a']", "ConfigFactory.parse_string( \"\"\" a: foo b: [a, 1, 10 weeks, 5", "d : [ ${b} ] } \"\"\", resolve=False ) config2", "ConfigFactory.parse_string( \"\"\"o1 = { foo : { a : 1", "== 'str' assert config3.get('d') == 'test str me' assert config3.get('f')", "config = ConfigFactory.parse_string( \"\"\" a = a b c b", "config.get(\"x\") == [1, 2] def test_self_append_object(self): config = ConfigFactory.parse_string( \"\"\"", "'<NAME>' in names assert 'Homer\\'s favorite coffee' in names assert", "= ConfigFactory.parse_string( \"\"\" a { d { g.h.j.u: 5 g", "= 5 \"\"\" ) assert config1['a'] == 5 config2 =", ") with pytest.raises(ConfigSubstitutionException): 
ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent} def", "${compilerCommon} ${substrate-suite} ] \"\"\") assert config.get(\"b1\")[1]['VAR'] == 'right' assert config.get(\"b2\")[1]['VAR']", "ConfigFactory.parse_string( \"\"\" { a: { b: { c = str", "o3 = ${o1} ${o2} \"\"\" ) assert config.get_int('o1.foo.b') == 2", "x = ${?x} def ''' config = ConfigFactory.parse_string( \"\"\" x", "c = b 7 \"\"\" ) assert config.get('a') == 'a", "= 6 } data-center-east = ${data-center-generic} {name = \"east\"} \"\"\"", "in config assert config['h'] == 1 def test_cascade_optional_substitution(self): config =", "\"\"\" common_modules = abc ${non_existent} def \"\"\" ) def test_non_compatible_substitution(self):", "b: 5 } } \"\"\" ) assert config.get_string('a.b') == '5'", "[ {'a': 1, 'b': 2, 'f': 4}, {'a': 3, 'c':", "5 assert config2.get('d') == 'test 5' config3 = ConfigFactory.parse_string( \"\"\"", "5, f: 6 } \"\"\" ) assert config['a'] == {'a':", "\"\"\") assert config == { 'x': 5, 'b': 'test', 'a':", "] \"\"\" ) assert config.get('a') == [1, 2, 3, 4,", "config['a'] == [ [1, 2], [3, 4] ] def test_list_of_dicts_with_merge(self):", "] \"\"\" ) def test_resolve_package_path(self): path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path)", "= \"east\"} ${data-center-generic} \"\"\" ) assert config2.get('data-center-east.cluster-size') == 6 assert", "\"\"\" ) def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( \"\"\" x =", "= ConfigFactory.parse_string( \"\"\" include \"{tmp_file}\" x = 42 \"\"\".format(tmp_file=fdin.name) )", "6] def test_self_merge_ref_substitutions_object(self): config1 = ConfigFactory.parse_string( \"\"\" a : {", "a: 1, b: 2, } b = # test #", "ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try:", "${application.default-jvm-opts} \"\"\") assert config[\"application.large-jvm-opts\"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ]", "config = ConfigFactory.parse_string( \"\"\" a = [ 1, 2, ]", "\"\"\" a: [ include required(\"dummy.txt\") 3 4 ] \"\"\" )", "= config2.with_fallback(config1) assert result.get(\"string\") == 'abcdef' # test no mutation", "1} assert config.get(\"x.z\") == 1 assert set(config.get(\"x\").keys()) == set(['y', 'z'])", "def ''' config = ConfigFactory.parse_string( \"\"\" x += def \"\"\"", "= {foo: bar} a.d = ${a.d} \"\"\" ) assert config.get(\"a\")", "+ '\"' + fdin.name + \"\"\"\" } a : {", "a = a b c b = 5 b c", "= 12.12321 long1 = 121.22E3423432 neg_long1 = 121.22E-1 long2 =", "test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string(", "json.loads(source) try: from dateutil.relativedelta import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1", "None assert config.get_config('t.g') is None @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?',", "'bool_from_env': 'false' } assert config.get_bool('bool_from_env') is False @mock.patch.dict(os.environ, INT_VAR='5') def", "config['foo'] == 42 def test_fallback_with_resolve(self): config3 = ConfigFactory.parse_string(\"c=5\") config2 =", "ConfigFactory.parse_string( \"\"\" name: foo \"\"\" ) config5 = ConfigFactory.parse_string( u\"\"\"", "dateutil.relativedelta import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1 months', relativedelta(months=1)), ('a:", "= ${e} } } d = test ${a.b.c} me e", "config.get('t.e.y.g') == 'hey 
dude!' assert config.get('t.e.y.h') == 'hey man' assert", "def test_self_ref_substitution_dict_recurse2(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} x", "config['c'] == 6 def test_assign_int(self): config = ConfigFactory.parse_string( \"\"\" short", "= { 'plain-backslash': '\\\\', 'tab': '\\t', 'no-tab': '\\\\t', 'newline': '\\n',", "[-1, 0] ${x} [5, 6] x = [-3, -2] ${x}", "== 'c' def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a =", "5, 'b': 'test', 'a': 'foo bar test dummy', 'c': 'foo", "['perl', 'java', 'python'] def test_concat_multi_line_dict(self): config = ConfigFactory.parse_string( \"\"\" common_modules", "def test_self_merge_ref_substitutions_object(self): config1 = ConfigFactory.parse_string( \"\"\" a : { }", "= ConfigFactory.parse_string(\"foo = 5.0\") assert config['foo'] == 5.0 def test_list_substitutions(self):", "\"\"\" ) assert config.get(\"x\") == {'a': 1} def test_self_ref_substitution_array_to_dict(self): config", "{ f: 5 g } \"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a =", "[] var = \"wrong\" compilerCommon : ${common} { VAR :", "a: {{ include \"{tmp_file}\" c: 3 d: 4 }} \"\"\".format(tmp_file=fdin.name)", "'&']) def test_fail_parse_forbidden_characters(self, forbidden_char): with pytest.raises(ParseBaseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char',", "domain' assert config.get_string(u'www.example-ö.com.us.name') == 'second domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure',", "6], [1, 2, 5, 6, 7, 8] ] def test_invalid_assignment(self):", "data-center-generic = { cluster-size = 6 } data-center-east = ${data-center-generic}", "config = ConfigFactory.parse_string( \"\"\" STRING_VAR = ${STRING_VAR} \"\"\") assert config", "55 \"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl]", "= ConfigFactory.parse_string( \"\"\" x += [1,2] \"\"\" ) assert config.get(\"x\")", "\"cr\": \"\\r\", \"no-cr\": \"\\\\r\", \"windows\": \"c:\\\\temp\" } \"\"\" expected =", "def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('y = ${x}') fdin.flush()", "('a: 2 month', relativedelta(months=2)), ('a: 3 mo', relativedelta(months=3)), ('a: 3mo',", "but on python 2 long with be a long assert", "short = 12 long = 12321321837612378126213217321 negative = -15 \"\"\"", "ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") == {\"v1\": 2, \"v2\": 3}", "config.get(\"a\") == 'foo' assert set(config.keys()) == set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with", "{name = \"east\"} \"\"\" ) assert config.get('data-center-east.cluster-size') == 6 assert", "4 d['pear'] = 1 d['tree'] = { 'a': 'abc\\ntest\\n', 'b':", "mid.b = 1 \"\"\" ) config = root.get_config(\"mid\").with_fallback(root) assert config['a']", "ConfigFactory.parse_string( \"\"\" STRING_VAR = ${?STRING_VAR} \"\"\") assert config == {", "be discovered monkeypatch.syspath_prepend(temp_dir) # load the config and include the", "pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö') with pytest.raises(ConfigException): config.get_bool(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException):", "config = 
ConfigFactory.parse_string( \"\"\" a = 5 b=${a}${a} c=${a} ${a}", "forbidden_char): value = \"hey man{}\".format(forbidden_char) config = ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert", "config = ConfigFactory.parse_string( \"\"\" application.foo = 128mm application.large-jvm-opts = [\"-XX:+UseParNewGC\"]", "a.b = ${a.b} a.b = ${a.b} a.c = [1,2] a.c", "g.h.j.u: 5 g { h.d: 4 } g.h.k: f d", "tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a: 1, b: 2}') fdin.flush() config1 =", "10 // comment 11 // comment 12 \"\"\" ) assert", "with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent} \"\"\" )", "test_self_ref_substitiotion_dict_in_array(self): config = ConfigFactory.parse_string( \"\"\" x = {x: [3,4]} x", "config = ConfigFactory.parse_string( \"\"\" { a : { include \"\"\"", ") assert config.get(\"x.y\") == {'z': 1} assert config.get(\"x.z\") == 1", "with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" a = ${b} b = ${c}", "period(weeks=10)), ('a: 11 week', period(weeks=11)), ('a: 12 w', period(weeks=12)), ('a:", "test_from_dict_with_ordered_dict(self): d = OrderedDict() d['banana'] = 3 d['apple'] = 4", "\"bar \" trailing_ws = \"foo\" \"bar \"{ws} trailing_ws_with_comment = \"foo\"", "b = [5, 6] a: [ ${b} [1, 2] [3,", "test # comment 0 # comment 3 a: { #", "config.get(\"x\") assert one == {'x': [3, 4]} assert two ==", "try: from dateutil.relativedelta import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1 months',", "\"{}\"'.format(value)) assert config.get_string(\"a\") == value def test_parse_with_enclosing_brace(self): config = ConfigFactory.parse_string(", "c=${a} ${a} \"\"\" ) assert config == { 'a': 5,", "data-center-generic = { cluster-size = 6 } data-center-east = {", "'bar' def test_dos_chars_with_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = \"5\"') assert config['foo']", "{'x': [3, 4], 'y': [5, 6]} def test_self_ref_substitiotion_dict_in_array(self): config =", "b c' assert config.get('b') == '5 b' assert config.get('c') ==", "== { 'a': ['foo\"', \"bar\"] } def test_pop(self): config_tree =", "} config = ConfigFactory.from_dict(d) assert config == d def test_from_dict_with_ordered_dict(self):", "assert config_tree == { 'foo': '1', 'bar': '2' } def", "== [1, 2] config2 = ConfigFactory.parse_string( \"\"\" a: [ include", "x = {y: {z: 1}} x = ${x.y} \"\"\" )", "def test_self_ref_substitution_string_opt_concat(self): ''' Example from HOCON spec ''' config =", "= ConfigFactory.parse_string( \"\"\" a = { a: 1, b: 2,", "= ConfigFactory.parse_string( \"\"\" a: {{ include \"{tmp_file}\" c: 3 d:", "= ConfigFactory.parse_string( \"\"\" // bar.a should end up as 4", "\"u\": 5 }, \"d\": 4, \"k\": \"f d\" } }", "def test_dict_substitutions(self): config = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size", "common_modules = [php, python] host_modules = [java] ${common_modules} \"\"\" )", "with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('y = ${x}') fdin.flush() config =", "d = test ${a.b.c} me f = test ${a.b.e} me", "a { b = foo c = bar } a.c", "config files are available assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b':", "= # test # test2 [ 3, 4,] c =", "include \"dummy.txt\" 3 4 ] \"\"\" ) assert config1['a'] ==", "'abc' assert config.get('c') == 'the man' assert config.get('d') == 'woof'", "3 long will be an int but on python 2", "\"\"\" common_modules = [php, python] host_modules = [java] 
${common_modules} [perl]", "\"\"\" app.heap_size = 128 app.java_opts = [ -Xms${app.heap_size}m -Xmx${app.heap_size}m ]", "misc = \"mist\" } \"\"\" ) # use unicode path", "} } d = test ${a.b.c} } \"\"\" ) assert", "def test_bool_from_environment(self): config = ConfigFactory.parse_string( \"\"\" bool_from_env = ${TRUE_OR_FALSE} \"\"\")", "pytest.raises(ConfigException): config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue", "${dict} { y: 2 } \"\"\", resolve=False ) config2 =", "assert config.get_int('int_from_env') == 5 def test_unicode_dict_key(self): input_string = u\"\"\" www.sample.com", "a.d = {foo: bar} a.d = ${a.d} \"\"\" ) assert", "${a} ${b} c: ${b} ${a} d: ${a} ${b} d: ${a}", "('a: 8 hour', period(hours=8)), ('a: 9 h', period(hours=9)), ('a: 10", "'python'} def test_parse_URL_from_samples(self): config = ConfigFactory.parse_URL(\"file:samples/aws.conf\") assert config.get('data-center-generic.cluster-size') == 6", "me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( \"\"\" app.heap_size = 128", "'\\n\\\\\\\\n\\n' def test_multiline_with_backslash(self): config = ConfigFactory.parse_string( \"\"\" test = line1", "= ConfigFactory.parse_string( \"\"\" x = {y: {y: 1}} x =", "= ConfigFactory.parse_string( \"\"\" include-database=true \"\"\") assert config == { 'include-database':", "= -.54 \"\"\" ) # on python 3 long will", "= 3 d['apple'] = 4 d['pear'] = 1 d['orange'] =", "= 1 mid.b = 1 \"\"\" ) config = root.get_config(\"mid\").with_fallback(root)", "ConfigFactory.parse_string( \"\"\" a = foo bar \"\"\") assert config ==", "config1 = ConfigFactory.parse_string( \"\"\" b1 : { v1: 1 }", "config.get('a') == [1, 2, 3, 4, 5, 6] assert config.get_list('a')", "def test_missing_config(self): config = ConfigFactory.parse_string( \"\"\" a = 5 \"\"\"", "{y: 1}} x = ${x.y} \"\"\" ) assert config.get(\"x.y\") ==", "str(config2) def test_fallback_non_root(self): root = ConfigFactory.parse_string( \"\"\" a = 1", "= ${?x} def ''' config = ConfigFactory.parse_string( \"\"\" x =", "config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] def test_parse_string_with_duration_with_long_unit_name(self): config", "assert 'b' not in config assert config['d'] == 4 assert", "\"\"\" a = a b c b = 5 b", "a : ${foo.d}, b : 1 } bar.b = 3", "\"\"\" test_no_quotes: abc\\\\n\\\\n test_quotes: \"abc\\\\n\\\\n\" \"\"\" ) assert config ==", "config = ConfigFactory.parse_string( \"\"\" x += [1,2] \"\"\" ) assert", "assert config['database.pass'] == '<PASSWORD>' def test_substitution_flat_override(self): config = ConfigFactory.parse_string( \"\"\"", "= ConfigFactory.parse_string( \"\"\" a = // abc abc c =", "assert config == { 'test_no_quotes': 'abc\\n\\n', 'test_quotes': 'abc\\n\\n' } def", "== '/tmp' config5 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size", "{ cluster-size: 8 } misc = \"mist\" } \"\"\" )", "ConfigFactory.parse_string( \"\"\" a = ${b} b = ${c} c =", ": { a : { c : 1 } }", "= r\"\"\" { \"plain-backslash\": \"\\\\\", \"tab\": \"\\t\", \"no-tab\": \"\\\\t\", \"newline\":", "= ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") == {\"v1\": 2, \"v2\":", "= \"5\"') assert config['foo'] == '5' def test_dos_chars_with_triple_quoted_string_noeol(self): config =", "ConfigFactory.parse_string( \"\"\" a: 1 b: foo 
c: ${a} ${b} c:", "== 5 def test_unicode_dict_key(self): input_string = u\"\"\" www.sample.com { us", "x += def \"\"\" ) assert config.get(\"x\") == \" def\"", "not set so show raise an exception with pytest.raises(ConfigMissingException): config.get('b')", "1, b: 2} x = ${x} {c: 3} x =", "11 week', period(weeks=11)), ('a: 12 w', period(weeks=12)), ('a: 10 days',", "{bar['type'] for bar in bars if 'type' in bar} print(types,", "\"\"\" ) assert config.get(\"bar\") == {'a': 4, 'b': 3} assert", "assert config.get(\"x.y\") == [5, 6] assert config.get(\"x.z\") == {'x': [3,", "is True assert config.get_int('t.e.y.f') == 7 assert config.get('t.j') == [1,", "= \"foo\" \"bar \"{ws} trailing_ws_with_comment = \"foo\" \"bar \"{ws}// comment", "config['t.d.c'] == 5 assert config['k.\"b.f.d\"'] == 7 def test_dotted_notation_merge(self): config", "'d': {'pc': [1]}, 'e': {'pa': [1], 'pb': [1]} } def", "with pytest.raises(ValueError): ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path(\"non_existent_module:foo.py\") def test_include_package_file(self,", "6 ] \"\"\" ) assert config['a'] == [1, 2] assert", "assert '<NAME>' in names assert 'Homer\\'s favorite coffee' in names", "1, 2, 3 ] \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\"", "\"\"\" data-center-generic = { cluster-size = 6 } data-center-east =", "6 } data-center-east = {name = \"east\"} ${data-center-generic} data-center-east-prod =", "expected_result = { \"a\": { \"d\": { \"g\": { \"h\":", "ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = aa ${common_modules} bb", "${b} ] } \"\"\", resolve=False ) config2 = ConfigFactory.parse_string( \"\"\"", "config6 = config4.with_fallback(config5) assert config6 == { 'longName': 'long foo',", "= ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules = [java]", "= ${data-center-generic} data-center-east = { name = \"east\" } \"\"\"", "== 'b 7' def test_concat_list(self): config = ConfigFactory.parse_string( \"\"\" a", "assert config == { 'a': 5, 'b': '55', 'c': '5", "= {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1, 2]')", "None assert config.get_list('t.g') is None assert config.get_config('t.g') is None @pytest.mark.parametrize('forbidden_char',", "'\\n', 'no-newline': '\\\\n', 'cr': '\\r', 'no-cr': '\\\\r', 'windows': 'c:\\\\temp', }", "comment 8 3, # comment 9 ] } # comment", "${x} \"\"\" ) assert config.get(\"x\") == [-3, -2, -1, 0,", "= ${?DB_HOST} } database { host = \"other.host.net\" port =", "def test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path(\"non_existent_module:foo.py\")", "be a long assert config['a'] == 121.22 assert config['b'] ==", "\"5\"') assert config['foo'] == '5' def test_dos_chars_with_triple_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo", "common_modules = [perl] host_modules = 55 ${common_modules} \"\"\" ) with", "'garfield': { 'say': 'meow' }, 't': 2 } } assert", "['php', 'python'] assert config3.get('host_modules') == ['java', 'php', 'python', 'perl'] config4", "test_object_field_substitution(self): config = ConfigFactory.parse_string( \"\"\" A = ${Test} Test {", "test_optional_substitution(self): config = ConfigFactory.parse_string( 
\"\"\" a = 45 b =", "y: 2 } \"\"\", resolve=False ) config2 = config2.with_fallback(config1) assert", "necessary files (__init__ and config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'), 'a').close() with", "= ${?NOT_EXISTS} pass = ${?NOT_EXISTS} } \"\"\") assert config['database.name'] ==", "ConfigFactory.parse_string( \"\"\" bool_from_env = ${TRUE_OR_FALSE} \"\"\") assert config == {", "42 \"\"\".format(tmp_file=fdin.name) ) assert config['x'] == 42 assert config['y'] ==", "12321321837612378126213217321 negative = -15 \"\"\" ) # on python 3", "= ConfigFactory.parse_string( \"\"\" b1 : { v1: 1 } b", "full_modules = ${host_modules} [c, go] \"\"\" ) assert config4.get('common_modules') ==", "assert config['d'] == 4 assert config['e'] == 45 assert 'g'", "[5,6]} x = {z: ${x}} \"\"\" ) assert config.get(\"x.x\") ==", "and include the other config file from 'my_module' config =", "config = unresolved.with_fallback(source) assert config['foo'] == 42 config = source.with_fallback(unresolved)", "merge with its .overriden_value # if both are ConfigTree config_tree", "= [ 5, 6 ] \"\"\" ) assert config['a'] ==", "set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from HOCON spec ''' config", "\"k\": \"f d\" } } }, \"h\": { \"i\": {", "HOCON spec ''' config = ConfigFactory.parse_string( \"\"\" // bar.a should", "d { g.h.j.u: 5 g { h.d: 4 } g.h.k:", "test_list_substitutions(self): config = ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules", "config.get('host_modules') == ['php', 'python', 'java'] config2 = ConfigFactory.parse_string( \"\"\" common_modules", "ConfigFactory.parse_string( \"\"\" x = {x: [3,4]} x = [${x}, 2,", "assert config == { 'no_trailing_ws': \"foo bar \", 'trailing_ws': \"foo", "'windows': 'c:\\\\temp', } config = ConfigFactory.parse_string(source) assert config == expected", "\"\"\") assert config == { 'include-database': True } def test_substitution_override(self):", "== 3 assert config['t.d.c'] == 5 assert config['k.\"b.f.d\"'] == 7", "config['b'] == {'c': 3, 'd': 4} assert config['c'] == {'e':", "dummy c = foo ${x} bv d = foo ${x}", "result is not config1 # test no mutation on config2", "ConfigFactory.parse_string( \"\"\" a = { b: 1 c: 2 }", "short = 12.12321 long1 = 121.22E3423432 neg_long1 = 121.22E-1 long2", "b : 1 c : ${a} { d : [", "'fgh' == config['/abc/cde3'] def test_mutation_values(self): config = ConfigFactory.parse_string( \"\"\" common", "5 assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f') == 7 assert", "\\\"\\\"\\\" \\\\\\\\n \\\"\\\"\\\" \"\"\" ) assert config['with-escaped-backslash'] == '\\n\\\\\\\\\\n' assert", "\"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent} abc", "test_parse_with_enclosing_brace(self): config = ConfigFactory.parse_string( \"\"\" { a: { b: 5", "1months', relativedelta(months=1)), ('a: 2 month', relativedelta(months=2)), ('a: 3 mo', relativedelta(months=3)),", "load the config and include the other config file from", "'128mm' ] assert config[\"application.large-jvm-opts2\"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC', ]", "\"east\"} \"\"\" ) assert config.get('data-center-east.cluster-size') == 6 assert config.get('data-center-east.name') ==", "# ConfigValues must merge with its .overriden_value # if both", "'y': 2} def test_fallback_self_ref_substitutions_concat_string(self): config1 = ConfigFactory.parse_string( \"\"\" string =", "ConfigFactory.parse_string(\"foo = bar\") 
assert config['foo'] == 'bar' def test_dos_chars_with_quoted_string_noeol(self): config", "\"abc\\\\n\\\\n\" \"\"\" ) assert config == { 'test_no_quotes': 'abc\\n\\n', 'test_quotes':", "'java'] config2 = ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules", "\"\"\" quoted: \"abc\\\\\"test\" unquoted: abc\\\\\"test \"\"\") assert 'abc\"test' == config['quoted']", "'no_trailing_ws': \"foo bar \", 'trailing_ws': \"foo bar \", 'trailing_ws_with_comment': \"foo", "121.22 assert config['b'] == -121.22 assert config['c'] == .54 assert", "config['value'] def test_keys_with_slash(self): config = ConfigFactory.parse_string( \"\"\" /abc/cde1: abc \"/abc/cde2\":", "def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_animals.conf\") assert config.get('animals.garfield.say') == 'meow' assert", "[5, 6] a: [ ${b} [1, 2] [3, 4] ${b}", "ConfigFactory.parse_string( \"\"\" value: \"{\\\\\"critical\\\\\":\\\\\"0.00\\\\\",\\\\\"warning\\\\\":\\\\\"99.99\\\\\"}\" \"\"\" ) assert '{\"critical\":\"0.00\",\"warning\":\"99.99\"}' == config['value']", "3 mo', relativedelta(months=3)), ('a: 3mo', relativedelta(months=3)), ('a: 3 mon', '3", "\"\"\".format(tmp_file=fdin.name) ) assert config1['a'] == expected_res config2 = ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string( \"\"\" common : { } b1 = [] var", "assert config3.get('f') == 'test str me' def test_string_substitutions_with_no_space(self): config =", "c = 5 \"\"\") assert config == { 'a': 'abc',", "a=1, b=\"abc\", c=the man, d=woof, a-b-c-d=test, a b c d=test2,", "ConfigFactory.parse_string(\"c=5\") config2 = ConfigFactory.parse_string(\"b=${c}\", resolve=False) config1 = ConfigFactory.parse_string(\"a=${b}\", resolve=False) \\", "${?DB_HOST} } database { host = \"other.host.net\" port = 433", "def test_quoted_key_with_dots(self): config = ConfigFactory.parse_string( \"\"\" \"a.b.c.d\": 3 t {", "-Xms${app.heap_size}m -Xmx${app.heap_size}m ] \"\"\" ) assert config.get('app.java_opts') == [ '-Xms128m',", "== 'test str' assert config2.get('f') == 'test str ' config3", "3 retries_msg = You have ${num} retries retries_msg = ${?CUSTOM_MSG}", "5, 'c': 5} == config1 def test_optional_substitution(self): config = ConfigFactory.parse_string(", "min'), ('a: 4 seconds', period(seconds=4)), ('a: 5 second', period(seconds=5)), ('a:", "[\"a\"] assert config.get_list('sub.baz') == [\"a\", \"b\"] assert config.get_list('sub2.baz') == [\"a\",", "test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path(\"non_existent_module:foo.py\") def", "with-escaped-backslash: \\\"\\\"\\\" \\\\\\\\ \\\"\\\"\\\" with-newline-escape-sequence: \\\"\\\"\\\" \\\\n \\\"\\\"\\\" with-escaped-newline-escape-sequence: \\\"\\\"\\\"", "{ 'say': 'meow' }, 't': 2 } } assert expected", "\"j\": { \"u\": 5 }, \"d\": 4, \"k\": \"f d\"", "= merged.get(\"b\") assert len(b) == 1 assert b[0] == {\"v1\":", "== 'peopledb' assert config['database.pass'] == '<PASSWORD>' def test_substitution_multiple_override(self): config =", "c: { } d: { pc: ${b.pa} } e: ${b}", "config['foo'] == 42 config = source.with_fallback(unresolved) assert config['foo'] == 42", "'b': 3} assert config.get(\"foo\") == {'c': 3, 'd': 4} assert", "5 43' } def test_complex_substitutions(self): config = ConfigFactory.parse_string( \"\"\" a:", "= \"str \" } } d = test ${a.b.c} 
f", "{'pc': [1]}, 'e': {'pa': [1], 'pb': [1]} } def test_assign_next_line(self):", "TRUE_OR_FALSE='false') def test_bool_from_environment(self): config = ConfigFactory.parse_string( \"\"\" bool_from_env = ${TRUE_OR_FALSE}", ") assert config.get(\"x.y\") == 1 assert set(config.get(\"x\").keys()) == set(['y']) def", "} d = test ${a.b.c} me f = test ${a.b.e}", "b: { c = 5 } } d = ${a.b.c}", "-.54 \"\"\" ) # on python 3 long will be", "g { h.d: 4 } g.h.k: f d } h.i.m", "g } \"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self):", "{ us { name = \"first domain\" } } www.example-ö.com", "b : 3 c : 4 } } o3 =", "assert config.get('d') == 'woof' assert config.get('a-b-c-d') == 'test' assert config.get('a", "assert config2['database']['port'] == 433 assert config2['database']['url'] == 'other.host.net:433' def test_fallback_substitutions_overwrite(self):", "{bar['name'] for bar in bars} types = {bar['type'] for bar", "[3, 4] def test_include_required_file(self): config = ConfigFactory.parse_string( \"\"\" a {", "path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path) def test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\")", "} config = ConfigFactory.from_dict(d) assert config == d def test_object_concat(self):", "ConfigFactory.parse_string( \"\"\" common_modules = [perl] \\ [java] \\ [python] \"\"\"", "a = ${?a}foo \"\"\" ) assert config.get(\"a\") == 'foo' assert", "6 } \"\"\" ) assert config['a'] == {'a': 1, 'b':", "with-newline-escape-sequence: \\\"\\\"\\\" \\\\n \\\"\\\"\\\" with-escaped-newline-escape-sequence: \\\"\\\"\\\" \\\\\\\\n \\\"\\\"\\\" \"\"\" )", "= ConfigFactory.parse_string( \"\"\" x += def \"\"\" ) assert config.get(\"x\")", "${common} { VAR : ${var} } substrate-suite: { VAR :", "2, 'c': 1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): '''", "{s: 3, t: 4} \"\"\") assert 'r' in config_tree['foo'] and", "config.get(\"b2\")[1]['VAR'] == 'right' def test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings are in", "${non_existent} abc \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules =", "b # test # test2 { c: 3, d: 4,}", "'no-cr': '\\\\r', 'windows': 'c:\\\\temp', } config = ConfigFactory.parse_string(source) assert config", "assert config.get('c') == 'test' assert config.get('g') == '6 test' assert", "host_modules = [java] ${common_modules} [perl] full_modules = ${host_modules} [c, go]", "= {x: [3,4]} x = {y: [5,6]} x = {z:", "config1 = ConfigFactory.parse_string( \"\"\" a = 123 a = ${?test}", "common_modules = abc ${non_existent} def \"\"\" ) def test_non_compatible_substitution(self): with", ") assert config['a'] == ['a', 'b'] assert config['b'] == ['c',", "THE ABOVE SETTINGS!\"\"\") assert config_tree == { 'foo': '1', 'bar':", "5, 6], [1, 2, 5, 6, 7, 8] ] def", "def test_multi_line_escape(self): config = ConfigFactory.parse_string( \"\"\" with-escaped-backslash: \\\"\\\"\\\" \\\\\\\\ \\\"\\\"\\\"", "== 12 assert isinstance(config['short'], int) assert config['long'] == 12321321837612378126213217321 assert", "test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings are in the same format as", "common_modules = abc ${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "c = 5 } } d = test ${a.b.c} me", 
"config.get(\"x\") == {'a': 1, 'b': 2} def test_self_append_nonexistent_object(self): config =", "def test_substitution_list_with_append(self): config = ConfigFactory.parse_string( \"\"\" application.foo = 128mm application.large-jvm-opts", "{ 'string_from_env': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref(self): config =", ") merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") == {\"v1\":", "config.get_int('o3.foo.b') == 3 assert config.get_int('o1.foo.c', default=42) == 42 assert config.get_int('o3.foo.a')", "5, 6 ] \"\"\" ) assert config.get('a') == [1, 2,", "other config file from 'my_module' config = ConfigFactory.parse_string( \"\"\" a:", "{ c = 5 } } d = ${a.b.c} }", "test_quotes: \"abc\\\\n\\\\n\" \"\"\" ) assert config == { 'test_no_quotes': 'abc\\n\\n',", "assert config['a'] == data_set[1] def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( \"\"\"", "= ConfigFactory.parse_string( \"\"\" { database { host = \"localhost\" port", "7 \"\"\" ) assert config.get('a') == 'a b c' assert", "period(minutes=3)), ('a: 3 min', '3 min'), ('a: 4 seconds', period(seconds=4)),", "-15 def test_assign_float(self): config = ConfigFactory.parse_string( \"\"\" a = 121.22", "7 } \"\"\" ) assert config.get('a.b.c') == 7 assert config.get('d')", "= test ${a.b.c} me } \"\"\" ) assert config3.get('a.b.c') ==", "== 42 config = source.with_fallback(unresolved) assert config['foo'] == 42 def", "print(types, '(((((') assert '<NAME>' in names assert 'Homer\\'s favorite coffee'", "= \"east\"} ${data-center-generic} data-center-east-prod = ${data-center-east} {tmpDir=/tmp} \"\"\" ) assert", "ConfigFactory.parse_string( \"\"\" string = abc \"\"\" ) config2 = ConfigFactory.parse_string(", "3, 'd': 4 } with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a: 1,", "[python] \"\"\" ) assert config['common_modules'] == ['perl', 'java', 'python'] def", "6 sec', '6 sec'), ('a: 7 hours', period(hours=7)), ('a: 8", "= bar } a.c = ${a.b}\" \"${a.b} a.d = baz", "[ \"a\", \"b\", ] b = # test # test2", "= ConfigFactory.parse_string(\"foo = bar\") assert config['foo'] == 'bar' def test_dos_chars_with_quoted_string_noeol(self):", "config['b'] == ['a', 1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config =", "Example from HOCON spec ''' config = ConfigFactory.parse_string( \"\"\" a", "5 second', period(seconds=5)), ('a: 6 s', period(seconds=6)), ('a: 6 sec',", "== \" def\" def test_self_append_nonexistent_array(self): config = ConfigFactory.parse_string( \"\"\" x", "3, 4, 5, 6] def test_fallback_self_ref_substitutions_append_plus_equals(self): config1 = ConfigFactory.parse_string( \"\"\"", "d = { 'banana': 3, 'apple': 4, 'pear': 1, 'orange':", "x = [${x}, 2, 3] \"\"\" ) (one, two, three)", "\"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3 = ConfigFactory.parse_string( \"\"\"", "pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue #102 config_tree =", "# comment 9 ] } # comment 10 // comment", "= ${common_modules} 55 \"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules", "pass=<PASSWORD> database { user = ${user} pass = ${pass} }", "= ConfigFactory.parse_string(\"c=5\") config2 = 
ConfigFactory.parse_string(\"b=${c}\", resolve=False) config1 = ConfigFactory.parse_string(\"a=${b}\", resolve=False)", "== { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self): config", "[v.strip() for v in config.get('t.e.y.i').split('\\n')] == ['', '\"first line\"', '\"second\"", "'b': 5, 'c': 5} == config1 def test_optional_substitution(self): config =", "'f'] def test_assign_dict_strings_with_equal_sign_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = {", "= ConfigFactory.parse_string( \"\"\" a.b = 4 a.d = 3 \"\"\"", "config2 = ConfigFactory.parse_string( \"\"\" a: {{ c: 3 d: 4", "3] def test_missing_config(self): config = ConfigFactory.parse_string( \"\"\" a = 5", "['', '\"first line\"', '\"second\" line', ''] assert config.get_bool('t.d') is True", "'3 min'), ('a: 4 seconds', period(seconds=4)), ('a: 5 second', period(seconds=5)),", "a = {f: 5} common_modules ${a} {perl: 1} \"\"\") def", "{ c = 7 d = 8 } } \"\"\"", "${foo.d}, b : 1 } bar.b = 3 // foo.c", "d = ${a.b.c} f = ${a.b.e} } \"\"\" ) assert", "= { cluster-size = 6 } data-center-east = ${data-center-generic} {name", "'meow' }, 't': 2 } } assert expected == config", "# DO NOT CHANGE ANY OF THE ABOVE SETTINGS!\"\"\") assert", "a: { b: { c = str e = \"str", "= \\\"\\\"\\\" \"first line\" \"second\" line \\\"\\\"\\\" } j =", "str me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( \"\"\" app.heap_size =", "192.168.1.3/32 g = null } \"\"\" ) assert config.get_string('t.c') ==", "d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert config1['a'] == expected_res config2", "assert config['c'] == 'foo 1' assert config['d'] == '1 bar'", "== 'test str me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( \"\"\"", "3 assert config.get_int('o1.foo.c', default=42) == 42 assert config.get_int('o3.foo.a') == 1", "1minutes', period(minutes=1)), ('a: 2 minute', period(minutes=2)), ('a: 3 m', period(minutes=3)),", "{ } \"\"\", resolve=False ) with pytest.raises(ConfigException): config.as_plain_ordered_dict() def test_quoted_strings_with_ws(self):", "[1, 2, 3, 4, 5, 6] def test_self_merge_ref_substitutions_object(self): config1 =", "period(microseconds=113)), ('a: 114 us', period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a:", "config2['a'] == expected_res config3 = ConfigFactory.parse_string( \"\"\" a: {{ c:", "== [1, 2] assert config['b'] == [3, 4] assert config['c']", "'east' config3 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size =", "[ ('a: 1 minutes', period(minutes=1)), ('a: 1minutes', period(minutes=1)), ('a: 2", "\"\"\" a = { a: 1, b: 2, } b", "${?c2} h = ${?c1} ${?c2} 1 \"\"\") assert 'b' not", "= {y: -1} ${x} {d: 4} \"\"\" ) assert config.get(\"x\")", "assert config4.get('full_modules') == ['java', 'php', 'python', 'perl', 'c', 'go'] def", "d: 4,} c = { e: 5, f: 6 }", "def test_issue_75(self): config = ConfigFactory.parse_string( \"\"\"base : { bar: [\"a\"]", "3} assert config.get(\"foo\") == {'c': 3, 'd': 4} assert set(config.keys())", "ConfigFactory.parse_string(\"[1, 2, 3]\") assert config == [1, 2, 3] def", "= \"second domain\" } } \"\"\" config = ConfigFactory.parse_string(input_string) assert", "h: hey man i = \\\"\\\"\\\" \"first line\" \"second\" line", "\"\"\" ) assert config.get(\"x\") == [1, 2] def test_self_append_object(self): config", "== 2 assert config.get_int('o2.foo.b') == 3 assert 
config.get_int('o3.foo.b') == 3", "[1], 'pb': [1]}, 'c': {}, 'd': {'pc': [1]}, 'e': {'pa':", "= config.get(\"x\") assert one == {'x': [3, 4]} assert two", "== 5 def test_dos_chars_with_float_noeol(self): config = ConfigFactory.parse_string(\"foo = 5.0\") assert", "== {'e': 5, 'f': 6} def test_assign_dict_strings_no_equal_sign_with_eol(self): config = ConfigFactory.parse_string(", "\"\"\"o1 = { foo : { a : 1 b", "4] ] \"\"\" ) assert config['a'] == [ [1, 2],", "host = ${?DB_HOST} } database { host = \"other.host.net\" port", "[\"a\"] } sub : ${base} { baz: ${base.bar} [\"b\"] }", "\"bar \"{ws}// comment \"\"\".format(ws=' ')) assert config == { 'no_trailing_ws':", "== config config2 = ConfigFactory.parse_string( \"\"\" a { include required(file(\"samples/animals.d/cat.conf\"))", "= ConfigFactory.parse_string( \"\"\" a: {{ c: 3 d: 4 include", "with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö') with pytest.raises(ConfigException): config.get_bool(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_list(u'www.example-ö.com.us.name') with", "${x} x = {y: -1} ${x} {d: 4} \"\"\" )", "assert config.get('a') == 'a b c' assert config.get('b') == '5", "= ConfigFactory.parse_string( \"\"\" b1 : { v1: 2, v2: 3", "config = ConfigFactory.parse_string('foo = \"5\"') assert config['foo'] == '5' def", "42 def test_fallback_with_resolve(self): config3 = ConfigFactory.parse_string(\"c=5\") config2 = ConfigFactory.parse_string(\"b=${c}\", resolve=False)", "\" } } d = ${a.b.c} f = ${a.b.e} }", "config.get_int('o3.foo.c') == 4 def test_issue_75(self): config = ConfigFactory.parse_string( \"\"\"base :", "\"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w')", "mock import pytest from pyhocon import (ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree)", "'meow' assert config.get('dog.mutt.hates.garfield.say') == 'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_animals.conf\")", "6 s', period(seconds=6)), ('a: 6 sec', '6 sec'), ('a: 7", "} j = [1, 2, 3] u = 192.168.1.3/32 g", "a.d = 3 \"\"\" ) config3 = config1.with_fallback(config2) assert config3['a']", "week', period(weeks=11)), ('a: 12 w', period(weeks=12)), ('a: 10 days', period(days=10)),", "'python', 'perl'] config4 = ConfigFactory.parse_string( \"\"\" common_modules = [php, python]", "${common_modules} \"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl]", "required(file(\"samples/animals.d/cat.conf\")) t = 2 } \"\"\" ) assert expected ==", "\"12\" assert config.get_string(\"A.field3\") == \"123\" assert config.get_string(\"Test.field1\") == \"1\" assert", "config3.get('a.b.c') == 5 assert config3.get('d') == 'test 5 me' def", "bar' def test_substitution_nested_override(self): config = ConfigFactory.parse_string( \"\"\" database { name", "\"\\\\\", \"tab\": \"\\t\", \"no-tab\": \"\\\\t\", \"newline\": \"\\n\", \"no-newline\": \"\\\\n\", \"cr\":", "= ConfigFactory.parse_string(source) assert config == expected assert config == json.loads(source)", "ConfigFactory.parse_string(\"a=${b}\", resolve=False) \\ .with_fallback(config2, resolve=False) \\ .with_fallback(config3) assert {'a': 5,", "= ConfigFactory.parse_string( \"\"\" string_from_env = ${STRING_VAR} \"\"\") assert config ==", "\"\"\" x += def \"\"\" ) assert config.get(\"x\") == \"", "'-Xm16g', '128mm' ] assert 
config[\"application.large-jvm-opts2\"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC'", "\"\"\".format(ws=' ')) assert config == { 'no_trailing_ws': \"foo bar \",", "'!', '@', '*', '&']) def test_fail_parse_forbidden_characters(self, forbidden_char): with pytest.raises(ParseBaseException): ConfigFactory.parse_string('a:", "= ConfigFactory.parse_string(\"\"\" foo: \"1\" bar: \"2\" # DO NOT CHANGE", "3 include \"{tmp_file}\" d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert config3['a']", "'x': 5, 'b': 'test', 'a': 'foo bar test dummy', 'c':", "us', period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a: 111 millisecond', timedelta(milliseconds=111)),", "1} x += {b: 2} \"\"\" ) assert config.get(\"x\") ==", "\"\"\" a : { } b : 1 c :", "ConfigFactory.parse_string( \"\"\" a = 5 b=${a}${a} c=${a} ${a} \"\"\" )", "True } def test_substitution_override(self): config = ConfigFactory.parse_string( \"\"\" database {", "ConfigFactory.parse_string( \"\"\" a { include required(file(\"samples/animals.d/cat.conf\")) t = 2 }", "'b'] assert config['b'] == ['c', 'd'] assert config['c'] == ['e',", "2, } b # test # test2 { c: 3,", "e : ${a} { } \"\"\", resolve=False ) with pytest.raises(ConfigException):", "ConfigFactory.parse_string(\"b=${c}\", resolve=False) config1 = ConfigFactory.parse_string(\"a=${b}\", resolve=False) \\ .with_fallback(config2, resolve=False) \\", "encoding: utf-8 -*- import json import os import shutil import", "dir to sys.path so that 'my_module' can be discovered monkeypatch.syspath_prepend(temp_dir)", "b: 2}, {a: 3, c: 4}, ] \"\"\" ) assert", "Adress issue #102 config_tree = ConfigFactory.parse_string(\"\"\" foo: \"1\" bar: \"2\"", "== ['a', 'b'] assert config['b'] == ['c', 'd'] assert config['c']", "\"b\", ] b = # test # test2 [ \"c\",", "b = ${?c} d = ${?c} 4 e = ${?a}", "'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self): config = ConfigFactory.parse_string( \"\"\"", "\"g\": { \"h\": { \"j\": { \"u\": 5 }, \"d\":", "= ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path) def test_resolve_package_path_format(self): with pytest.raises(ValueError): ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\") def", "include \"{tmp_file}\" c: 3 d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert", "'trailing_ws': \"foo bar \", 'trailing_ws_with_comment': \"foo bar \" } def", "d e') == 'test3' def test_dict_merge(self): config = ConfigFactory.parse_string( \"\"\"", "on config2 assert \"abc\" not in str(config2) def test_fallback_non_root(self): root", "== [3, 4] assert config['c'] == [5, 6] def test_assign_list_strings_with_eol(self):", "\"\"\") assert config == { 'a': 'foo bar dummy' }", "${application.default-jvm-opts} [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts} \"\"\") assert", "name = ${?NOT_EXISTS} pass = ${?NOT_EXISTS} } \"\"\") assert config['database.name']", "config = ConfigFactory.parse_string( \"\"\" a: 1 b: foo c: ${a}", "abc \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = abc", "'b': 2} assert config['b'] == {'c': 3, 'd': 4} assert", "{ c = ${e} } } d = test ${a.b.c}", "assert config2.get('d') == 'test str' assert config2.get('f') == 'test str", "set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x}", "a.b { 
c = 7 d = 8 } }", "= 1 field2 = ${Test.field1}\"2\" field3 = ${Test.field2}\"3\" } \"\"\"", "as fdin: fdin.write('y = ${x}') fdin.flush() config = ConfigFactory.parse_string( \"\"\"", "test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} {y: 1}", "\"12\" assert config.get_string(\"Test.field3\") == \"123\" def test_one_line_quote_escape(self): config = ConfigFactory.parse_string(", "data_set): config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] def test_parse_string_with_duration_with_long_unit_name(self):", "} } foo : ${foo.a} foo : { a :", ") assert config.get(\"a\") == {'b': 3, 'c': [1, 2], 'd':", "'second domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure', append=True) with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö')", "OrderedDict() d['banana'] = 3 d['apple'] = 4 d['pear'] = 1", "common_modules = [perl] host_modules = ${common_modules} aa \"\"\" ) with", "config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): # Adress issue #102 config_tree = ConfigFactory.parse_string(\"\"\"", "{ d { g.h.j.u: 5 g { h.d: 4 }", "config = ConfigFactory.parse_string( \"\"\" b = {f: 4} a: [", "weeks', period(weeks=10)), ('a: 11 week', period(weeks=11)), ('a: 12 w', period(weeks=12)),", "{ 'a': 1, 'b': 2, 'c': 3, 'd': 4 }", "c = 5 } } a.b { c = 7", "{ 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config =", "ConfigFactory.parse_string('a = {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1,", "'-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config[\"application.large-jvm-opts2\"] == [ '-Xm16g', '128mm',", ") assert config2.get('data-center-east.cluster-size') == 6 assert config2.get('data-center-east.name') == 'east' config3", "= ${STRING_VAR} \"\"\") assert config == { 'string_from_env': 'value_from_environment' }", "= {f: 4} a: [ ${b} {a: 1, b: 2},", "${x} [3,4] x = [-1, 0] ${x} [5, 6] x", "\"\"\".format(tmp_file=fdin.name) ) assert config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w')", "man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&'])", "months', relativedelta(months=1)), ('a: 1months', relativedelta(months=1)), ('a: 2 month', relativedelta(months=2)), ('a:", "set(config.keys()) == set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x", "\"\"\" b: 14 \"\"\") config = unresolved.with_fallback(source) assert config['foo'] ==", "spec ''' config = ConfigFactory.parse_string( \"\"\" bar : { foo", "('a: 11000 nanoseconds', period(microseconds=11)), ('a: 1110000 nanosecond', period(microseconds=1110)), ('a: 1120000", "== 1 assert set(config.get(\"x\").keys()) == set(['y', 'z']) def test_self_ref_substitution_dict_path_hide(self): config", "[3, 4] ${b} [1, 2] ${b} [7, 8] ] \"\"\"", "== ['php', 'python', 'java'] config2 = ConfigFactory.parse_string( \"\"\" common_modules =", "period(microseconds=111)), ('a: 112 micros', period(microseconds=112)), ('a: 113 micro', period(microseconds=113)), ('a:", "${common_modules} \"\"\" ) assert config2.get('host_modules') == ['java', 'php', 'python'] 
config3", "'test 5 me' def test_cascade_string_substitutions(self): config = ConfigFactory.parse_string( \"\"\" {", "def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" a = ${b} b", "${user} pass = ${pass} } \"\"\") assert config['database.user'] == 'test_user'", "'foo bar dummy' } def test_quoted_unquoted_strings_with_ws_substitutions(self): config = ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string( \"\"\" x += [1,2] \"\"\" ) assert config.get(\"x\") ==", "assert config['c'] == [5, 6] def test_assign_list_strings_with_eol(self): config = ConfigFactory.parse_string(", "assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def", "= {name = \"east\"} ${data-center-generic} { cluster-size = 9, opts", "hour', period(hours=8)), ('a: 9 h', period(hours=9)), ('a: 10 weeks', period(weeks=10)),", "len(bars) == 10 names = {bar['name'] for bar in bars}", "= ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert config.get_string(\"a\") == value def test_parse_with_enclosing_brace(self): config", "2 config = ConfigFactory.from_dict(d) assert config == d def test_from_dict_with_nested_dict(self):", "result = config2.with_fallback(config1) assert result.get(\"string\") == 'abcdef' # test no", "'default-jvm-opts': ['-XX:+UseParNewGC'], 'large-jvm-opts': ['-XX:+UseParNewGC', '-Xm16g'] } def test_fallback_self_ref_substitutions_append(self): config1 =", "def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( \"\"\" foo: 42 foo: ${?a}", "assert config['a'] == [ {'a': 1, 'b': 2}, {'a': 3,", "config['a'] == [ {'a': 1, 'b': 2, 'f': 4}, {'a':", "names assert 'Homer\\'s favorite coffee' in names assert 'milk' in", "${a} { d : [ ${b} ] } \"\"\", resolve=False", "period class TestConfigParser(object): def test_parse_simple_value(self): config = ConfigFactory.parse_string( \"\"\"t =", "common_modules = [perl] host_modules = ${common_modules} 55 \"\"\" ) with", "config1.with_fallback(u'samples/aws.conf') assert config2 == { 'data-center-generic': {'cluster-size': 8}, 'data-center-east': {'cluster-size':", "\"\"\" a: {{ c: 3 include \"{tmp_file}\" d: 4 }}", "assert config['foo'] == 5 def test_dos_chars_with_float_noeol(self): config = ConfigFactory.parse_string(\"foo =", "# comment 2 { c = test // comment 0", "== 'str ' config2 = ConfigFactory.parse_string( \"\"\" { a: {", "cluster-size: 8 } misc = \"mist\" } \"\"\" ) #", "== 'woof' assert config.get('a-b-c-d') == 'test' assert config.get('a b c", "${?test} a = 5 \"\"\" ) assert config1['a'] == 5", "from dateutil.relativedelta import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1 months', relativedelta(months=1)),", "\"\"\" ) assert config3.get('a.b.c') == 5 assert config3.get('d') == 'test", "assert config['long2'] == 121.22E3423432 assert config['neg_long2'] == 121.22E-3 def test_assign_strings_with_eol(self):", "config2 = ConfigFactory.parse_string( \"\"\" { a: { b: { c", "config = ConfigFactory.parse_string( \"\"\" { a: { b: { c", "config.get_string(\"Test.field3\") == \"123\" def test_one_line_quote_escape(self): config = ConfigFactory.parse_string( \"\"\" test_no_quotes:", "3, 4,] c = [ 5, 6 ] \"\"\" )", "\"4\"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = \"4\" [5]') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a", "assert config['foo'] == 5.0 def test_list_substitutions(self): config = 
ConfigFactory.parse_string( \"\"\"", "1 mid.b = 1 \"\"\" ) config = root.get_config(\"mid\").with_fallback(root) assert", "{'pa': [1], 'pb': [1]} } def test_assign_next_line(self): config = ConfigFactory.parse_string(", "\"\"\") assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')", "assert config.get_int('t.e.y.f') == 7 assert config.get('t.j') == [1, 2, 3]", "period(days=10)), ('a: 11 day', period(days=11)), ('a: 12 d', period(days=12)), ('a:", "= [null] \"\"\" ) assert config.get('a') is None assert config.get('b')[0]", "h.i { e:65 } } \"\"\") expected_result = { \"a\":", "= 4 d['pear'] = 1 d['tree'] = { 'a': 'abc\\ntest\\n',", "pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}') with", "[c, go] \"\"\" ) assert config4.get('common_modules') == ['php', 'python'] assert", "'<PASSWORD>' def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( \"\"\" foo: 42 foo:", "have 3 retries' } def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "assert config.get(\"a\") == {'b': 3, 'c': [1, 2], 'd': {'foo':", "def test_assign_dict_strings_with_equal_sign_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = { a:", "SETTINGS!\"\"\") assert config_tree == { 'foo': '1', 'bar': '2' }", "\"bar\" ${b} dummy c = foo ${x} bv d =", "assert config == { 'int_from_env': '5' } assert config.get_int('int_from_env') ==", ": { foo : 42, baz : ${bar.foo} } bar", "config.get_int('int_from_env') == 5 def test_unicode_dict_key(self): input_string = u\"\"\" www.sample.com {", "pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent} \"\"\" ) with", "5 == config_tree.pop('a.c', 5) expected = { 'a': {'d': 6}", "= ${x.y} \"\"\" ) assert config.get(\"x.y\") == {'z': 1} assert", "3, 'c': 4, 'f': 4}, {'a': 3, 'c': 6, 'f':", "def test_substitutions_overwrite(self): config1 = ConfigFactory.parse_string( \"\"\" a = 123 a", ": ${bar.b}, d : 2 } foo.d = 4 \"\"\"", "long assert config['a'] == 121.22 assert config['b'] == -121.22 assert", "assert config.get_string('a.b') == 'test' assert config.get('t') == [1, 2, 3]", "ConfigFactory.parse_string( \"\"\" a: [ {a: 1, b: 2}, {a: 3,", "'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config = ConfigFactory.parse_string(", "} # comment 6 t = [1, # comment 7", "assert config2['database']['host'] == 'other.host.net' assert config2['database']['port'] == 433 assert config2['database']['url']", "4 } } o3 = ${o1} ${o2} \"\"\" ) assert", "'banana': 3, 'apple': 4, 'pear': 1, 'orange': 2, } config", "t: 4} \"\"\") assert 'r' in config_tree['foo'] and 't' in", ") assert config['a'] == 'a' assert config['b'] == 'b' assert", "6] assert config.get(\"x.z\") == {'x': [3, 4], 'y': [5, 6]}", "def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path(\"non_existent_module:foo.py\") def test_include_package_file(self, monkeypatch): temp_dir =", "include \"{tmp_file}\" d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert config3['a'] ==", "config == { 'a': 1, 'b': {'pa': [1], 'pb': [1]},", "config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): 
config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-ö.com.us.name.missing') def test_with_comment_on_last_line(self): #", "on python 3 long will be an int but on", "assert config['a']['y'] == 42 def test_var_with_include_keyword(self): config = ConfigFactory.parse_string( \"\"\"", "('a: 113 milli', timedelta(milliseconds=113)), ('a: 114 ms', timedelta(milliseconds=114)), ('a: 110", "= u\"\"\" www.sample.com { us { name = \"first domain\"", "set(config.get(\"x\").keys()) == set(['y', 'z']) def test_self_ref_substitution_dict_path_hide(self): config = ConfigFactory.parse_string( \"\"\"", "4 def test_issue_75(self): config = ConfigFactory.parse_string( \"\"\"base : { bar:", "3 d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert config1['a'] == expected_res", ") assert config['a.b'] == \"foo\" assert config['a.c'] == \"foo foo\"", "-*- import json import os import shutil import tempfile from", "[${b1}] \"\"\", resolve=False ) config2 = ConfigFactory.parse_string( \"\"\" b1 :", "4 \"\"\" ) assert config.get(\"bar\") == {'a': 4, 'b': 3}", "config = ConfigFactory.parse_string( \"\"\" x = abc x += def", "config = ConfigFactory.parse_string(source) assert config == expected assert config ==", "nano', period(microseconds=1130)), ('a: 1140000 ns', period(microseconds=1140)), ]) def test_parse_string_with_duration(self, data_set):", "}, 't': 2 } } assert expected == config config2", "config2 = ConfigFactory.parse_string( \"\"\" { database { host = \"localhost\"", "5, 6 ] \"\"\" ) assert config['a'] == [1, 2]", "'str' assert config3.get('d') == 'test str me' assert config3.get('f') ==", "== { 'no_trailing_ws': \"foo bar \", 'trailing_ws': \"foo bar \",", "\"\"\") assert 'abc\"test' == config['quoted'] assert 'abc\"test' == config['unquoted'] def", "test_parse_URL_from_invalid(self): config = ConfigFactory.parse_URL(\"https://nosuchurl\") assert config == [] def test_include_dict_from_samples(self):", ") assert config.get(\"x\") == \"abc def\" def test_self_append_non_existent_string(self): ''' Should", "3] assert config.get('t.u') == '192.168.1.3/32' assert config.get_int('t.g') is None assert", "'\"']) def test_fail_parse_forbidden_characters_in_context(self, forbidden_char): with pytest.raises(ParseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char',", "config.get('a.b.c') == 7 assert config.get('d') == 'test 7 me' def", "def test_include_glob_list_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_bars.conf\") bars = config.get_list('bars') assert len(bars)", "def test_parse_URL_from_invalid(self): config = ConfigFactory.parse_URL(\"https://nosuchurl\") assert config == [] def", "'test_user' assert config['database.pass'] == '<PASSWORD>' def test_substitution_flat_override(self): config = ConfigFactory.parse_string(", "def test_unicode_dict_key(self): input_string = u\"\"\" www.sample.com { us { name", "ConfigFactory.parse_string( \"\"\" app.heap_size = 128 app.java_opts = [ -Xms${app.heap_size}m -Xmx${app.heap_size}m", "d = test ${a.b.c} me } \"\"\" ) assert config3.get('a.b.c')", "d\" } } }, \"h\": { \"i\": { \"m\": 7,", "config.get_string('t.g') is None assert config.get_bool('t.g') is None assert config.get_list('t.g') is", "\"\"\" Test scientific expression of number \"\"\" config = ConfigFactory.parse_string(", "from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from 
dateutil.relativedelta import", "config['d'] == 4 assert config['e'] == 45 assert 'g' not", "regression testing https://github.com/chimpler/pyhocon/issues/44 config2 = config1.with_fallback(u'samples/aws.conf') assert config2 == {", "} } o3 = ${o1} ${o2} \"\"\" ) assert config.get_int('o1.foo.b')", "== 'abc' assert config.get('c') == 'the man' assert config.get('d') ==", "= -15 \"\"\" ) # on python 3 long will", ") assert config.get('a.b.c') == 7 assert config.get('a.b.d') == 8 def", "3 y', relativedelta(years=3)), ('a: 3y', relativedelta(years=3)), ]) def test_parse_string_with_duration_optional_units(data_set): config", "1, 'b': 2, 'c': 3, 'd': 4 } with tempfile.NamedTemporaryFile('w')", "config = ConfigFactory.parse_string( \"\"\" int_from_env = ${INT_VAR} \"\"\") assert config", "] b = # test # test2 [ \"c\", \"d\",]", "'?', '!', '@', '*', '&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value =", "= 6 } data-center-east = {name = \"east\"} ${data-center-generic} {", ": { v1: 2, v2: 3 } \"\"\", resolve=False )", "} \"\"\") assert config['database.name'] == 'peopledb' assert config['database.pass'] == '<PASSWORD>'", "# test # test2 [ \"c\", \"d\",] c = [", "config2 = ConfigFactory.parse_string( \"\"\" e : ${a} { } \"\"\",", "${data-center-generic} \"\"\" ) assert config6['data-center-east'] == { 'name': 'east', 'cluster-size':", "== 'str' assert config1.get('f') == 'str ' config2 = ConfigFactory.parse_string(", "# test2 { c: 3, d: 4,} c { e:", "= 5 b c = b 7 \"\"\" ) assert", "test_fallback_self_ref_substitutions_append(self): config1 = ConfigFactory.parse_string( \"\"\" list = [ 1, 2,", "trailing_ws_with_comment = \"foo\" \"bar \"{ws}// comment \"\"\".format(ws=' ')) assert config", "assert config.get('data-center-east.name') == 'east' config2 = ConfigFactory.parse_string( \"\"\" data-center-generic =", "'&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value = \"hey man{}\".format(forbidden_char) config =", "= {a:perl} \\ {b:java} \\ {c:python} \"\"\" ) assert config['common_modules']", "${a} { } \"\"\", resolve=False ) with pytest.raises(ConfigException): config.as_plain_ordered_dict() def", "ConfigValues must merge with its .overriden_value # if both are", "config4.get('data-center-east.name') == 'east' assert config4.get('data-center-east-prod.cluster-size') == 6 assert config4.get('data-center-east-prod.tmpDir') ==", "d: 4 }} \"\"\".format(tmp_file=fdin.name) ) assert config3['a'] == expected_res def", "def test_self_ref_substitution_array_to_dict(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2] x", "[1, 2] def test_include_missing_file(self): config1 = ConfigFactory.parse_string( \"\"\" a: [", "= ${pass} } \"\"\") assert config['database.user'] == 'test_user' assert config['database.pass']", "period(minutes=2)), ('a: 3 m', period(minutes=3)), ('a: 3m', period(minutes=3)), ('a: 3", "{ a : 1 b : 2 } } o2", "= ConfigFactory.parse_string(\"[1, 2, 3]\") assert config == [1, 2, 3]", "{ bar: [\"a\"] } sub : ${base} { baz: ${base.bar}", "config3.get('f') == 'test str me' def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string(", "config2) ConfigParser.resolve_substitutions(merged) b = merged.get(\"b\") assert len(b) == 2 assert", "= ConfigFactory.parse_string( \"\"\" { a: { b: 5 } }", "ConfigFactory.parse_string( \"\"\" x = ${x} x = ${x} \"\"\" )", "+= [ 4, 5, 6 ] \"\"\", resolve=False ) config2", "\"\"\" a: [ include \"{tmp_file}\" ] \"\"\".format(tmp_file=fdin.name) ) assert config1['a']", 
"config4.get('data-center-east-prod.tmpDir') == '/tmp' config5 = ConfigFactory.parse_string( \"\"\" data-center-generic = {", ") assert config['b'] == period(weeks=10) def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self): config = ConfigFactory.parse_string(", "5, 6] def test_self_append_array(self): config = ConfigFactory.parse_string( \"\"\" x =", "== ['perl', 'java', 'python'] def test_concat_multi_line_list(self): config = ConfigFactory.parse_string( \"\"\"", "mon', '3 mon'), ('a: 1 years', relativedelta(years=1)), ('a: 1years', relativedelta(years=1)),", "ConfigFactory.parse_string( \"\"\" a: {{ c: 3 d: 4 include \"{tmp_file}\"", "issue #110 # ConfigValues must merge with its .overriden_value #", "= ConfigFactory.parse_string( \"\"\" a { include required(\"samples/animals.d/cat.conf\") t = 2", "config['database.pass'] == '<PASSWORD>' def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( \"\"\" foo:", "bars = config.get_list('bars') assert len(bars) == 10 names = {bar['name']", ") assert config['common_modules'] == ['perl', 'java', 'python'] def test_concat_multi_line_dict(self): config", "\"\"\" ) assert config['a'] == [1, 2] assert config['b'] ==", "d = test ${a.b.c} f = test ${a.b.e} } \"\"\"", "ConfigFactory.parse_string('foo = \"5\"') assert config['foo'] == '5' def test_dos_chars_with_triple_quoted_string_noeol(self): config", "} } \"\"\" ) assert config['a']['x'] == 42 assert config['a']['y']", "pass = ${?pass} } \"\"\") assert config['database.name'] == 'peopledb' assert", "= { a: 1, b: 2, } b = #", "\"foo\" \"bar \" trailing_ws = \"foo\" \"bar \"{ws} trailing_ws_with_comment =", "\"no-tab\": \"\\\\t\", \"newline\": \"\\n\", \"no-newline\": \"\\\\n\", \"cr\": \"\\r\", \"no-cr\": \"\\\\r\",", "d['banana'] = 3 d['apple'] = 4 d['pear'] = 1 d['tree']", "${STRING_VAR} \"\"\") assert config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ,", "= ${x} {c: 3} x = {z: 0} ${x} x", "def test_string_substitutions_with_no_space(self): config = ConfigFactory.parse_string( \"\"\" app.heap_size = 128 app.java_opts", "[java, ${main_language}] \"\"\" ) assert config.get('languages') == ['java', 'php'] def", "= ConfigFactory.parse_string( \"\"\" foo: 42 foo: ${?a} \"\"\", resolve=False) source", "\"m\": 7, \"d\": 5, \"e\": 65 } } } }", "3, 'retries_msg': 'You have 3 retries' } def test_substitution_cycle(self): with", "\"\"\" num = 3 retries_msg = You have ${num} retries", "assert config['d'] == -.54 def test_sci_real(self): \"\"\" Test scientific expression", "config = ConfigFactory.parse_string( \"\"\" no_trailing_ws = \"foo\" \"bar \" trailing_ws", "assert config == d def test_from_dict_with_nested_dict(self): d = OrderedDict() d['banana']", "= ConfigFactory.parse_string( \"\"\" string = abc \"\"\" ) config2 =", "config['c'] == {'e': 5, 'f': 6} def test_assign_dict_strings_no_equal_sign_with_eol(self): config =", "== \"1\" assert config.get_string(\"A.field2\") == \"12\" assert config.get_string(\"A.field3\") == \"123\"", "'no-newline': '\\\\n', 'cr': '\\r', 'no-cr': '\\\\r', 'windows': 'c:\\\\temp', } config", "ConfigFactory.parse_string( \"\"\" a { d { g.h.j.u: 5 g {", "test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1, 2]') fdin.flush() config1 =", "number \"\"\" config = ConfigFactory.parse_string( \"\"\" short = 12.12321 long1", "assert config['common_modules'] == {'a': 'perl', 'b': 'java', 'c': 'python'} def", "{ a: { b: { c = str e =", "config = ConfigFactory.parse_string( \"\"\" 
            string_from_env = ${STRING_VAR}
            """)

        assert config == {'string_from_env': 'value_from_environment'}
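        # Hedged sketch (not part of the original assertions): with the
        # optional substitution form ${?NAME}, an unset environment variable
        # simply leaves the key out instead of raising. The variable name
        # below is made up for illustration and assumed to be unset.
        sketch = ConfigFactory.parse_string(
            """
            maybe_from_env = ${?PYHOCON_SKETCH_UNSET_VAR}
            """)
        assert 'maybe_from_env' not in sketch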
${compilerCommon}", "ConfigParser.resolve_package_path(\"non_existent_module:foo.py\") def test_include_package_file(self, monkeypatch): temp_dir = tempfile.mkdtemp() try: module_dir =", "'c': 'foo 5 bv', 'd': 'foo 5 43' } def", "42 config = source.with_fallback(unresolved) assert config['foo'] == 42 def test_fallback_with_resolve(self):", "@pytest.mark.parametrize('data_set', [ ('a: 1 months', relativedelta(months=1)), ('a: 1months', relativedelta(months=1)), ('a:", "10 weeks c: bar \"\"\" ) assert config['b'] == period(weeks=10)", "== set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from HOCON spec '''", "config = ConfigFactory.parse_string( \"\"\" bool_from_env = ${TRUE_OR_FALSE} \"\"\") assert config", "config.get_int('o1.foo.b') == 2 assert config.get_int('o2.foo.b') == 3 assert config.get_int('o3.foo.b') ==", "period(seconds=6)), ('a: 6 sec', '6 sec'), ('a: 7 hours', period(hours=7)),", "pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = aa ${common_modules}", "ConfigFactory.parse_string( \"\"\" { a : { include \"\"\" + '\"'", "] def test_int_substitutions(self): config1 = ConfigFactory.parse_string( \"\"\" { a: {", ") assert config.get(\"bar\") == {'foo': 43, 'baz': 43} assert set(config.keys())", "= \"east\"} \"\"\" ) assert config.get('data-center-east.cluster-size') == 6 assert config.get('data-center-east.name')", "test_cascade_string_substitutions(self): config = ConfigFactory.parse_string( \"\"\" { a: { b: {", "('a: 7 hours', period(hours=7)), ('a: 8 hour', period(hours=8)), ('a: 9", "a : 1 b : 2 } } o2 =", "} k { \"b.f.d\": 7 } \"\"\" ) assert config['\"a.b.c.d\"']", "h = ${?c1} ${?c2} 1 \"\"\") assert 'b' not in", "config.get_string('t.c') == '5' assert config.get_int('t.c') == 5 assert config.get_float('t.c') ==", "include file(\"{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == [1, 2]", "STRING_VAR = ${STRING_VAR} \"\"\") assert config == { 'STRING_VAR': 'value_from_environment'", "u\"\"\" { a: { b: { c = str e", "with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a = { f: 5 g", "bar in bars} types = {bar['type'] for bar in bars", "'-Xm16g'] def test_parse_URL_from_invalid(self): config = ConfigFactory.parse_URL(\"https://nosuchurl\") assert config == []", "\"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" dict = ${dict} {", "2} def test_self_append_nonexistent_object(self): config = ConfigFactory.parse_string( \"\"\" x += {a:", "} d = ${a.b.c} } \"\"\" ) assert config1.get('a.b.c') ==", "str me' assert config3.get('f') == 'test str me' def test_string_substitutions_with_no_space(self):", "+= {a: 1} \"\"\" ) assert config.get(\"x\") == {'a': 1}", "== 42 def test_var_with_include_keyword(self): config = ConfigFactory.parse_string( \"\"\" include-database=true \"\"\")", "config.get_list('base.bar') == [\"a\"] assert config.get_list('sub.baz') == [\"a\", \"b\"] assert config.get_list('sub2.baz')", "\"\"\" ) assert config.get('a.b.c') == 7 assert config.get('d') == 'test", "ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string(", "{ 'a': {'d': 6} } assert expected == config_tree def", "<PASSWORD> } database { name = ${?user} pass = ${?pass}", "def test_keys_with_slash(self): config = ConfigFactory.parse_string( \"\"\" /abc/cde1: abc \"/abc/cde2\": \"cde\"", ") config2 = ConfigFactory.parse_string( \"\"\" list = ${list} [ 4,", 
"database { user = ${user} pass = ${pass} } \"\"\")", "[ 1, 2, ] b = # test # test2", "\"\"\" ) assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)] def", "= ${?c} 4 e = ${?a} g = ${?c1} ${?c2}", "[1,2] x += [3,4] \"\"\" ) assert config.get(\"x\") == [1,", "ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from", "\"\"\" a = 1 mid.b = 1 \"\"\" ) config", "no mutation on config2 assert \"abc\" not in str(config2) def", "assert [v.strip() for v in config.get('t.e.y.i').split('\\n')] == ['', '\"first line\"',", "with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}')", "c { e: 5, f: 6 } \"\"\" ) assert", "= ${x} \"\"\" ) def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON", "pb: ${b.pa} } c: { } d: { pc: ${b.pa}", "== 'meow' assert config.get('animals.mutt.hates.garfield.say') == 'meow' def test_include_glob_list_from_samples(self): config =", "assert config['a'] == [ [5, 6, 1, 2], [3, 4,", "in bar} print(types, '(((((') assert '<NAME>' in names assert 'Homer\\'s", "= 8 } } \"\"\" ) assert config.get('a.b.c') == 7", "minute', period(minutes=2)), ('a: 3 m', period(minutes=3)), ('a: 3m', period(minutes=3)), ('a:", "temp_dir = tempfile.mkdtemp() try: module_dir = os.path.join(temp_dir, 'my_module') module_conf =", "b = {f: 4} a: [ ${b} {a: 1, b:", "def test_self_ref_child(self): config = ConfigFactory.parse_string( \"\"\" a.b = 3 a.b", "\"1\" bar: \"2\" # DO NOT CHANGE ANY OF THE", "[ {a: 1, b: 2}, {a: 3, c: 4}, ]", "== 45 assert 'g' not in config assert config['h'] ==", "def test_substitution_list_with_append_substitution(self): config = ConfigFactory.parse_string( \"\"\" application.foo = 128mm application.default-jvm-opts", "o2 = { foo : { b : 3 c", "x = {a: 1} x += {b: 2} \"\"\" )", "+= def \"\"\" ) assert config.get(\"x\") == \"abc def\" def", "= ${?c1} ${?c2} h = ${?c1} ${?c2} 1 \"\"\") assert", "} user=test_user pass=<PASSWORD> database { user = ${user} pass =", "a: [ [1, 2] [3, 4] ] \"\"\" ) assert", "c = foo ${x} bv d = foo ${x} 43", "\"east\"} ${data-center-generic} { cluster-size = 9, opts = \"-Xmx4g\" }", "= { 'a': 'abc\\ntest\\n', 'b': [1, 2, 3] } config", "assert config.get_list('t.g') is None assert config.get_config('t.g') is None @pytest.mark.parametrize('forbidden_char', ['+',", ") assert config == { 'test_no_quotes': 'abc\\n\\n', 'test_quotes': 'abc\\n\\n' }", "\"{tmp_file}\" }} \"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3 =", "config4.get('host_modules') == ['java', 'php', 'python', 'perl'] assert config4.get('full_modules') == ['java',", "name = \"first domain\" } } www.example-ö.com { us {", "== ['java', 'php'] def test_substitution_list_with_append(self): config = ConfigFactory.parse_string( \"\"\" application.foo", "6] def test_self_append_array(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2]", "== '\\n\\\\\\\\n\\n' def test_multiline_with_backslash(self): config = ConfigFactory.parse_string( \"\"\" test =", "{c:python} \"\"\" ) assert config['common_modules'] == {'a': 'perl', 'b': 'java',", "f: 6 } \"\"\" ) assert config['a'] == {'a': 1,", "config = ConfigFactory.parse_string( \"\"\" { a: { b: 5 }", "d: ${a} bar \"\"\") assert config['c'] == 'foo 1' assert", "4 e = ${?a} g = ${?c1} ${?c2} h =", "} data-center-east = ${data-center-generic} {name = \"east\"} \"\"\" ) 
        assert config5['data-center-east'] == {
            'name': 'east',
            'cluster-size': 6
        }
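        # config5 assigns the ${data-center-generic} reference first and the
        # literal object second; the merge is field-by-field, so both keys
        # survive. config6 below exercises the opposite ordering.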
ConfigFactory.parse_string(
            """
            a:
6} }

        assert expected == config_tree
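        # Hedged sketch (assumption, not part of the original assertions):
        # ConfigTree.merge_configs merges field-by-field, with the second
        # tree winning on scalar conflicts.
        left = ConfigFactory.parse_string("a: {b: 1, c: 2}")
        right = ConfigFactory.parse_string("a: {c: 3}")
        assert ConfigTree.merge_configs(left, right) == {'a': {'b': 1, 'c': 3}}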
        assert config2['database']['port'] == 433
        assert config2['database']['url'] == 'other.host.net:433'

    def
test_self_merge_ref_substitutions_object(self):", "\"\"\" ) assert config.get(\"x.x\") == [3, 4] assert config.get(\"x.y\") ==", "the module folder and necessary files (__init__ and config) os.mkdir(module_dir)", "== 3 def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string( \"\"\" x =", ") assert config.get(\"x\") == [1, 2] def test_self_append_object(self): config =", "def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON spec ''' config =", "\"\"\" a { include required(file(\"samples/animals.d/cat.conf\")) t = 2 } \"\"\"", "def test_string_from_environment_self_ref(self): config = ConfigFactory.parse_string( \"\"\" STRING_VAR = ${STRING_VAR} \"\"\")", "= [1, 2] [3, 4] [ 5, 6 ] \"\"\"", "{ 'a': 'foo bar dummy' } def test_quoted_unquoted_strings_with_ws_substitutions(self): config =", "2}, {a: 3, c: 4} ${b}, {a: 3} ${b} {c:", "assert config3['a'] == [1, 2] def test_include_missing_file(self): config1 = ConfigFactory.parse_string(", "ConfigFactory.parse_string( \"\"\" a: 1 b: ${c} { pa: [${a}] pb:", "\"\"\" ) assert config.get(\"a\") == 'foo' assert set(config.keys()) == set(['a'])", "a = null b = [null] \"\"\" ) assert config.get('a')", "assert config['short'] == 12 assert isinstance(config['short'], int) assert config['long'] ==", "(ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException, ConfigWrongTypeException)", "ConfigParser.resolve_package_path(\"pyhocon/config_parser.py\") def test_resolve_package_path_missing(self): with pytest.raises(ImportError): ConfigParser.resolve_package_path(\"non_existent_module:foo.py\") def test_include_package_file(self, monkeypatch): temp_dir", "'my_module') module_conf = os.path.join(module_dir, 'my.conf') # create the module folder", "test_parse_override(self): config = ConfigFactory.parse_string( \"\"\" { a: { b: {", "for bar in bars if 'type' in bar} print(types, '(((((')", "timedelta from pyparsing import ParseBaseException, ParseException, ParseSyntaxException import mock import", "[1, 2, 3, 4] def test_self_append_string(self): ''' Should be equivalent", ") assert config['a'] == [ [1, 2], [3, 4] ]", "== 'test str ' config3 = ConfigFactory.parse_string( u\"\"\" { a:", "\"\"\" STRING_VAR = ${?STRING_VAR} \"\"\") assert config == { 'STRING_VAR':", "= test ${a.b.e} } \"\"\" ) assert config2.get('a.b.c') == 'str'", "= true e.y = { f: 7 g: \"hey dude!\"", "('a: 11 day', period(days=11)), ('a: 12 d', period(days=12)), ('a: 110", "with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a = {f: 5} common_modules ${a}", "{ host = localhost port = 5432 user = people", "} \"\"\") assert 5 == config.b.pb def test_escape_quote(self): config =", "\"\"\" common_modules = perl \\ java \\ python \"\"\" )", "= \"\"\"5\"\"\"') assert config['foo'] == '5' def test_dos_chars_with_int_noeol(self): config =", "= ConfigFactory.parse_string( \"\"\" STRING_VAR = ${?STRING_VAR} \"\"\") assert config ==", "== 1 assert config.get('a.c') == 2 assert config.get('b.c') == 5", "import (ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException, ConfigMissingException,", "= ConfigFactory.parse_string( \"\"\" a { include required(file(\"samples/animals.d/cat.conf\")) t = 2", "int) assert config['negative'] == -15 def test_assign_float(self): config = ConfigFactory.parse_string(", "7 g: \"hey dude!\" h: hey man i = \\\"\\\"\\\"", "and 't' in config_tree['foo'] and 
config_tree['foo']['s'] == 3

    def test_attr_syntax(self):
        assert config.get('t.e.y.h') == 'hey man'
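        # Hedged sketch: the typed accessors coerce where sensible, e.g.
        # get_string() on an integer field behaves like str() (assumption
        # based on the get_string('t.c') check elsewhere in this suite).
        assert config.get_string('t.e.y.f') == '7'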
common_modules = [php,", "assert b[1] == {'v1': 1, 'v2': 3} def test_self_merge_ref_substitutions_object3(self): config1", "{ v1: 1 } b1 : {v2: 2 } b", "\"b\" c = \"c\" \"\"\" ) assert config['a'] == 'a'", "test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( \"\"\" a=1, b=\"abc\", c=the man, d=woof,", "as 4 bar : { a : ${foo.d}, b :", "config3 = ConfigFactory.parse_string( \"\"\" a: {{ c: 3 include \"{tmp_file}\"", "a = 1 mid.b = 1 \"\"\" ) config =", "config_tree['foo'] and 't' in config_tree['foo'] and config_tree['foo']['s'] == 3 def", "DO NOT CHANGE ANY OF THE ABOVE SETTINGS!\"\"\") assert config_tree", "110 nanoseconds', period(microseconds=0)), ('a: 11000 nanoseconds', period(microseconds=11)), ('a: 1110000 nanosecond',", "config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('y", "${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts} \"\"\") assert config[\"application.large-jvm-opts\"] ==", "\"\"\", resolve=False ) config2 = ConfigFactory.parse_string( \"\"\" b1 : {", "== 1 assert b[0] == {\"v1\": 2, \"v2\": 3} def", "5} common_modules ${a} {perl: 1} \"\"\") def test_invalid_dict(self): with pytest.raises(ParseSyntaxException):", "'5 5' } def test_dict_substitutions(self): config = ConfigFactory.parse_string( \"\"\" data-center-generic", ") assert config.get_string('t.c') == '5' assert config.get_int('t.c') == 5 assert", "2 } b = [${b1}] \"\"\", resolve=False ) config2 =", "= [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] \"\"\") assert config.get(\"b1\")[1]['VAR']", "assert config.get('g') == '6 test' assert config.get('a.b') == 'test' assert", "g.h.k: f d } h.i.m = 7 h.i { d:", "${foo.a} foo : { a : 2 } \"\"\" )", "ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") == {\"v1\": 2, \"v2\": 3} b =", "== d def test_object_concat(self): config = ConfigFactory.parse_string( \"\"\"o1 = {", "== [ {'a': 1, 'b': 2}, {'a': 3, 'c': 4}", "bar} a.d = ${a.d} \"\"\" ) assert config.get(\"a\") == {'b':", "c = test // comment 0 g = 6 test", "x = [-1, 0] ${x} [5, 6] x = [-3,", "3 } config4 = ConfigFactory.parse_string( \"\"\" name: foo \"\"\" )", "\"\"\" a.b = 4 a.d = 3 \"\"\" ) config3", "def test_attr_syntax(self): config = ConfigFactory.parse_string( \"\"\" a: 1 b: {", "= ConfigFactory.parse_string( \"\"\" a = 5 b=${a}${a} c=${a} ${a} \"\"\"", "\"{ws}// comment \"\"\".format(ws=' ')) assert config == { 'no_trailing_ws': \"foo", "2 { c = test // comment 0 g =", "'c': 1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example", "${compilerCommon} ${substrate-suite} ] b2 = [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite}", "x = ${x.y} \"\"\" ) assert config.get(\"x.y\") == 1 assert", "assert config.get('foo') == {'a': 2, 'c': 1} assert set(config.keys()) ==", "with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('[1, 2]') fdin.flush() config1 = ConfigFactory.parse_string(", "= ConfigFactory.parse_string( \"\"\"o1 = { foo : { a :", "\\ {b:java} \\ {c:python} \"\"\" ) assert config['common_modules'] == {'a':", "test_self_ref_substitution_array_to_dict(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2] x =", "# comment 4 b: test, # comment 5 } #", "} config6 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size =", "pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a = { f: 5 
                    g
                }
['+', '`', '^', '?', '!', '@', '*',", "micros', period(microseconds=112)), ('a: 113 micro', period(microseconds=113)), ('a: 114 us', period(microseconds=114)),", "[perl] host_modules = aa ${common_modules} bb \"\"\" ) def test_self_ref_substitution_array(self):", "5 } } k { \"b.f.d\": 7 } \"\"\" )", "b: 14 \"\"\") config = unresolved.with_fallback(source) assert config['foo'] == 42", "= {z: ${x}} \"\"\" ) assert config.get(\"x.x\") == [3, 4]", "import relativedelta @pytest.mark.parametrize('data_set', [ ('a: 1 months', relativedelta(months=1)), ('a: 1months',", "host_modules = aa ${common_modules} bb \"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string(", "{ \"j\": { \"u\": 5 }, \"d\": 4, \"k\": \"f", "'*', '&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value = \"hey man{}\".format(forbidden_char) config", "= 4 d['pear'] = 1 d['orange'] = 2 config =", "3 ] \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" list =", "45\\n') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = [4] \"4\"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a", "c: 3, d: 4,} c = { e: 5, f:", "'c': 6, 'f': 4} ] def test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string(", ") def test_non_compatible_substitution(self): with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl]", "def test_include_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/animals.conf\") assert config.get('cat.garfield.say') == 'meow' assert", "== 2 assert three == 3 def test_self_ref_substitution_dict_path(self): config =", "ConfigFactory.parse_file(\"samples/all_bars.conf\") bars = config.get_list('bars') assert len(bars) == 10 names =", "${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] \"\"\") assert config.get(\"b1\")[1]['VAR'] == 'right'", "ConfigFactory.parse_string( \"\"\" num = 3 retries_msg = You have ${num}", "'*', '&']) def test_fail_parse_forbidden_characters(self, forbidden_char): with pytest.raises(ParseBaseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char))", "= {f: 5} common_modules ${a} {perl: 1} \"\"\") def test_invalid_dict(self):", "config_tree = ConfigFactory.parse_string(\"\"\" foo: ${bar} foo: ${baz} bar: {r: 1,", "}') fdin.flush() config = ConfigFactory.parse_string( \"\"\" { a : {", "= \"east\" } data-center-east = ${data-center-generic} \"\"\" ) assert config6['data-center-east']", "[ '-Xms128m', '-Xmx128m' ] def test_int_substitutions(self): config1 = ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string(", "[1, 2, 3] def test_missing_config(self): config = ConfigFactory.parse_string( \"\"\" a", "\"\"\" ) assert config.get(\"x\") == [-3, -2, -1, 0, 1,", "create the module folder and necessary files (__init__ and config)", "\"\"\" ) assert config.get_list('base.bar') == [\"a\"] assert config.get_list('sub.baz') == [\"a\",", "\"\\t\", \"no-tab\": \"\\\\t\", \"newline\": \"\\n\", \"no-newline\": \"\\\\n\", \"cr\": \"\\r\", \"no-cr\":", "[${x}, 2, 3] \"\"\" ) (one, two, three) = config.get(\"x\")", "dict = { x: 1 } \"\"\" ) config2 =", "def test_substitution_flat_override(self): config = ConfigFactory.parse_string( \"\"\" database { name =", "('a: 10 days', period(days=10)), ('a: 11 day', period(days=11)), ('a: 12", "# test 
            # test2
            5
            c = 6
            """
        )
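        # Comments directly after "=" are skipped, so b binds to the next
        # non-comment token (5) rather than to the comment text.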
test
                # test2
                "b"
test_parse_simple_value(self):
        config = ConfigFactory.parse_string(
            """t
\"\"\"5\"\"\"') assert config['foo']", "NOT CHANGE ANY OF THE ABOVE SETTINGS!\"\"\") assert config_tree ==", "[ '-Xm16g', '128mm', '-XX:+UseParNewGC', ] def test_substitution_list_with_append_substitution(self): config = ConfigFactory.parse_string(", "assert config['a.d'] == \"baz\" def test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( \"\"\"", "\"\"\", resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) b =", "{ 'x': 5, 'b': 'test', 'a': 'foo bar test dummy',", "''' Example from HOCON spec ''' config = ConfigFactory.parse_string( \"\"\"", "is None assert config.get_bool('t.g') is None assert config.get_list('t.g') is None", "b c d e') == 'test3' def test_dict_merge(self): config =", "def \"\"\" ) assert config.get(\"x\") == \" def\" def test_self_append_nonexistent_array(self):", "\"{ws} trailing_ws_with_comment = \"foo\" \"bar \"{ws}// comment \"\"\".format(ws=' ')) assert", "= ConfigFactory.parse_string( \"\"\" A = ${Test} Test { field1 =", "config = ConfigFactory.parse_string( \"\"\" a { include required(\"samples/animals.d/cat.conf\") t =", "end up as 3 foo : { c : ${bar.b},", "a { include required(\"samples/animals.d/cat.conf\") t = 2 } \"\"\" )", "java \\ python \"\"\" ) assert [x.strip() for x in", "bar } a.c = ${a.b}\" \"${a.b} a.d = baz \"\"\"", "\"h\": { \"i\": { \"m\": 7, \"d\": 5, \"e\": 65", "x = ${x.y} \"\"\" ) assert config.get(\"x.y\") == {'z': 1}", "ConfigFactory.parse_string( \"\"\" a = 123 a = ${?test} a =", "test_assign_float(self): config = ConfigFactory.parse_string( \"\"\" a = 121.22 b =", "config1 = ConfigFactory.parse_string( \"\"\" dict = { x: 1 }", "config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"c.d\") == [1] def test_self_merge_ref_substitutions_object2(self): config1 =", "None assert config.get('b')[0] is None def test_parse_override(self): config = ConfigFactory.parse_string(", "ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = 55 ${common_modules} \"\"\"", "\"d\" = true e.y = { f: 7 g: \"hey", "= ${a.b.c} f = ${a.b.e} } \"\"\" ) assert config1.get('a.b.c')", "config4 = ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules =", "{ include \"\"\" + '\"' + fdin.name + \"\"\"\" }", "3 d: 4 include \"{tmp_file}\" }} \"\"\".format(tmp_file=fdin.name) ) assert config2['a']", "config['long'] == 12321321837612378126213217321 assert isinstance(config['negative'], int) assert config['negative'] == -15", "== 9 assert config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts') == '-Xmx4g'", "} o2 = { foo : { b : 3", "a: [ include required(\"dummy.txt\") 3 4 ] \"\"\" ) def", "'c': 3, 'd': 4 } with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{a:", "config['d'] == -.54 def test_sci_real(self): \"\"\" Test scientific expression of", "= 1 d['orange'] = 2 config = ConfigFactory.from_dict(d) assert config", "{ a: 1, b: 2, } b # test #", "d: 6}') assert 3 == config_tree.pop('a.b', 5) assert 5 ==", "{ 'data-center-generic': {'cluster-size': 8}, 'data-center-east': {'cluster-size': 8, 'name': 'east'}, 'misc':", "assert config[\"application.large-jvm-opts2\"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC' ] def test_non_existent_substitution(self):", "0, 'y': -1, 'd': 4} def test_self_ref_child(self): config = ConfigFactory.parse_string(", "${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent}", "def 
def test_parse_null(self):
    config = ConfigFactory.parse_string(
        """
        a = null
        b = [null]
        """
    )
    assert config.get('a') is None
    assert config.get('b')[0] is None

def test_parse_override(self):
    config = ConfigFactory.parse_string(
        """
        a: {b: 1}
        a: {c: 2}
        b: {c: 3} {d: 4} {
            c: 5
        }
        """
    )
    assert config.get('a.b') == 1
    assert config.get('a.c') == 2
    assert config.get('b.c') == 5
    assert config.get('b.d') == 4

def test_escape_quote(self):
    config = ConfigFactory.parse_string(
        """
        quoted: "abc\\"test"
        unquoted: abc\\"test
        """
    )
    assert 'abc"test' == config['quoted']
    assert 'abc"test' == config['unquoted']

def test_escape_quote_complex(self):
    config = ConfigFactory.parse_string(
        """
        value: "{\\"critical\\":\\"0.00\\",\\"warning\\":\\"99.99\\"}"
        """
    )
    assert '{"critical":"0.00","warning":"99.99"}' == config['value']
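# Illustrative sketch (assumed semantics, mirroring test_parse_override): a
# repeated object key merges with the earlier object, while re-assigning a
# scalar leaf simply replaces it. The 'db' keys are hypothetical.
def example_object_merge_vs_scalar_override():
    config = ConfigFactory.parse_string(
        """
        db: {host: localhost}
        db: {port: 5432}   // merges with the previous object
        db.port: 5433      // scalar re-assignment wins
        """
    )
    assert config['db.host'] == 'localhost'
    assert config['db.port'] == 5433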
def test_keys_with_slash(self):
    config = ConfigFactory.parse_string(
        """
        /abc/cde1: abc
        "/abc/cde2": "cde"
        /abc/cde3: "fgh"
        """)
    assert 'abc' == config['/abc/cde1']
    assert 'cde' == config['/abc/cde2']
    assert 'fgh' == config['/abc/cde3']

def test_pop(self):
    config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}')
    assert 3 == config_tree.pop('a.b', 5)
    assert 5 == config_tree.pop('a.c', 5)
    expected = {'a': {'d': 6}}
    assert expected == config_tree

def test_assign_int(self):
    config = ConfigFactory.parse_string(
        """
        short = 12
        long = 12321321837612378126213217321
        negative = -15
        """
    )
    # on python 3 long is just an int
    assert config['short'] == 12
    assert isinstance(config['short'], int)
    assert config['long'] == 12321321837612378126213217321
    assert isinstance(config['negative'], int)
    assert config['negative'] == -15

def test_assign_float(self):
    config = ConfigFactory.parse_string(
        """
        a = 121.22
        b = -121.22
        c = .54
        d = -.54
        """
    )
    assert config['a'] == 121.22
    assert config['b'] == -121.22
    assert config['c'] == .54
    assert config['d'] == -.54

def test_sci_real(self):
    """ Test scientific expression of number """
    config = ConfigFactory.parse_string(
        """
        short = 12.12321
        long1 = 121.22E3423432
        neg_long1 = 121.22E-1
        long2 = 121.22e3423432
        neg_long2 = 121.22e-3
        """
    )
    assert config['short'] == 12.12321
    assert config['long1'] == 121.22E3423432
    assert config['neg_long1'] == 121.22E-1
    assert config['long2'] == 121.22E3423432
    assert config['neg_long2'] == 121.22E-3
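# Companion sketch to test_pop above, assuming pyhocon's documented ConfigTree
# semantics: popping a missing key without supplying a default raises
# ConfigMissingException rather than returning None. The key name is
# hypothetical.
def example_pop_missing_key():
    tree = ConfigFactory.parse_string('a: {b: 3}')
    with pytest.raises(ConfigMissingException):
        tree.pop('a.missing')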
def test_list_of_lists(self):
    config = ConfigFactory.parse_string(
        """
        a: [
            [1, 2]
            [3, 4]
        ]
        """
    )
    assert config['a'] == [[1, 2], [3, 4]]

def test_list_of_lists_with_merge(self):
    config = ConfigFactory.parse_string(
        """
        b = [5, 6]
        a: [
            ${b} [1, 2]
            [3, 4] ${b}
            [1, 2] ${b} [7, 8]
        ]
        """
    )
    assert config['a'] == [
        [5, 6, 1, 2],
        [3, 4, 5, 6],
        [1, 2, 5, 6, 7, 8]
    ]

def test_assign_next_line(self):
    config = ConfigFactory.parse_string(
        """
        a = // abc
        abc
        c =
        5
        """
    )
    assert config['a'] == 'abc'
    assert config['c'] == 5

def test_parse_with_comments(self):
    config = ConfigFactory.parse_string(
        """
        // comment 1
        # comment 2
        {
            c = test   // comment 0
            g = 6 test   # comment 0
            # comment 3
            a: { # comment 4
                b: test,   # comment 5
            } # comment 6
            t = [1, # comment 7
                 2, # comment 8
                 3, # comment 9
            ]
        } # comment 10
        // comment 11
        // comment 12
        """
    )
    assert config.get('c') == 'test'
    assert config.get('g') == '6 test'
    assert config.get('a.b') == 'test'
    assert config.get('t') == [1, 2, 3]

def test_with_comment_on_last_line(self):
    # Address issue #102: a trailing comment with no final newline must not
    # swallow the values parsed before it
    config_tree = ConfigFactory.parse_string("""
        foo: "1"
        bar: "2"
        # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!""")
    assert config_tree == {
        'foo': '1',
        'bar': '2'
    }
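# Sketch of the concatenation rule the two list tests above rely on: inside a
# list, a newline separates elements while same-line juxtaposition
# concatenates, so a substitution can be spliced into a single element.
# The key names 'base' and 'grown' are hypothetical.
def example_list_splice():
    config = ConfigFactory.parse_string(
        """
        base = [1, 2]
        grown = ${base} [3]
        """
    )
    assert config['grown'] == [1, 2, 3]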
def test_string_substitutions(self):
    config1 = ConfigFactory.parse_string(
        """
        {
            a: {
                b: {
                    c = str
                    e = "str "
                }
            }
            d = ${a.b.c}
            f = ${a.b.e}
        }
        """
    )
    assert config1.get('a.b.c') == 'str'
    assert config1.get('d') == 'str'
    assert config1.get('f') == 'str '

    config2 = ConfigFactory.parse_string(
        """
        {
            a: {
                b: {
                    c = str
                    e = "str "
                }
            }
            d = test ${a.b.c}
            f = test ${a.b.e}
        }
        """
    )
    assert config2.get('a.b.c') == 'str'
    assert config2.get('d') == 'test str'
    assert config2.get('f') == 'test str '

    config3 = ConfigFactory.parse_string(
        """
        {
            a: {
                b: {
                    c = str
                    e = "str "
                }
            }
            d = test ${a.b.c} me
            f = test ${a.b.e} me
        }
        """
    )
    assert config3.get('a.b.c') == 'str'
    assert config3.get('d') == 'test str me'
    assert config3.get('f') == 'test str me'

def test_concat_string(self):
    config = ConfigFactory.parse_string(
        """
        a = a b c
        b = 5 b
        c = b 7
        """
    )
    assert config.get('a') == 'a b c'
    assert config.get('b') == '5 b'
    assert config.get('c') == 'b 7'

def test_concat_list(self):
    config = ConfigFactory.parse_string(
        """
        a = [1, 2] [3, 4] [
            5,
            6
        ]
        """
    )
    assert config.get('a') == [1, 2, 3, 4, 5, 6]

def test_self_ref_substitution_array(self):
    config = ConfigFactory.parse_string(
        """
        x = [1,2]
        x = ${x} [3,4]
        x = [-1, 0] ${x} [5, 6]
        x = [-3, -2] ${x}
        """
    )
    assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6]

def test_self_ref_child(self):
    config = ConfigFactory.parse_string(
        """
        a.b = 3
        a.b = ${a.b}
        a.b = ${a.b}
        a.c = [1,2]
        a.c = ${a.c}
        a.d = {foo: bar}
        a.d = ${a.d}
        """
    )
    assert config.get("a") == {'b': 3, 'c': [1, 2], 'd': {'foo': 'bar'}}
def test_self_merge_ref_substitutions_object(self):
    config1 = ConfigFactory.parse_string(
        """
        a : { }
        b : 1
        c : ${a} { d : [ ${b} ] }
        """,
        resolve=False
    )
    config2 = ConfigFactory.parse_string(
        """
        e : ${a} {
        }
        """,
        resolve=False
    )
    merged = ConfigTree.merge_configs(config1, config2)
    ConfigParser.resolve_substitutions(merged)
    assert merged.get("c.d") == [1]

def test_self_merge_ref_substitutions_object3(self):
    config1 = ConfigFactory.parse_string(
        """
        b1 : { v1: 1 }
        b = [${b1}]
        """,
        resolve=False
    )
    config2 = ConfigFactory.parse_string(
        """
        b1 : { v1: 2, v2: 3 }
        """,
        resolve=False
    )
    merged = ConfigTree.merge_configs(config1, config2)
    ConfigParser.resolve_substitutions(merged)
    assert merged.get("b1") == {"v1": 2, "v2": 3}
    b = merged.get("b")
    assert len(b) == 1
    assert b[0] == {"v1": 2, "v2": 3}

def test_object_concat(self):
    config = ConfigFactory.parse_string(
        """o1 = {
            foo : {
                a : 1
                b : 2
            }
        }
        o2 = {
            foo : {
                b : 3
                c : 4
            }
        }
        o3 = ${o1} ${o2}
        """
    )
    assert config.get_int('o1.foo.b') == 2
    assert config.get_int('o2.foo.b') == 3
    assert config.get_int('o3.foo.b') == 3
    assert config.get_int('o1.foo.c', default=42) == 42
    assert config.get_int('o3.foo.c') == 4

def test_dict_substitutions(self):
    config = ConfigFactory.parse_string(
        """
        data-center-generic = { cluster-size = 6 }
        data-center-east = ${data-center-generic} {name = "east"}
        """
    )
    assert config.get('data-center-east.cluster-size') == 6
    assert config.get('data-center-east.name') == 'east'

    config2 = ConfigFactory.parse_string(
        """
        data-center-generic = { cluster-size = 6 }
        data-center-east = {name = "east"} ${data-center-generic}
        """
    )
    assert config2.get('data-center-east.cluster-size') == 6
    assert config2.get('data-center-east.name') == 'east'

    config3 = ConfigFactory.parse_string(
        """
        data-center-generic = { cluster-size = 6 }
        data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" }
        """
    )
    assert config3.get('data-center-east.cluster-size') == 9
    assert config3.get('data-center-east.name') == 'east'
    assert config3.get('data-center-east.opts') == '-Xmx4g'

    config4 = ConfigFactory.parse_string(
        """
        data-center-generic = { cluster-size = 6 }
        data-center-east = ${data-center-generic} {name = "east"}
        data-center-east-prod = ${data-center-east} {tmpDir=/tmp}
        """
    )
    assert config4.get('data-center-east.cluster-size') == 6
    assert config4.get('data-center-east.name') == 'east'
    assert config4.get('data-center-east-prod.cluster-size') == 6
    assert config4.get('data-center-east-prod.tmpDir') == '/tmp'
def test_assign_number_with_eol(self):
    config = ConfigFactory.parse_string(
        """
        a =
        4
        b = # test
        # test2
        5
        c =
        6
        """
    )
    assert config['a'] == 4
    assert config['b'] == 5
    assert config['c'] == 6

def test_optional_substitution(self):
    config = ConfigFactory.parse_string(
        """
        a = 45
        b = ${?c}
        d = ${?c} 4
        e = ${?a}
        """
    )
    assert 'b' not in config
    assert config['d'] == 4
    assert config['e'] == 45

def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self):
    config = ConfigFactory.parse_string(
        """
        a: foo
        b: [a, 1, 10 weeks, 5 minutes,]
        c: bar
        """
    )
    assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)]

def test_self_ref_substitution_dict_path(self):
    config = ConfigFactory.parse_string(
        """
        x = {y: {z: 1}}
        x = ${x.y}
        """
    )
    assert config.get("x.y") == {'z': 1}
    assert config.get("x.z") == 1
    assert set(config.get("x").keys()) == set(['y', 'z'])

def test_self_ref_substitution_dict_path_hide(self):
    config = ConfigFactory.parse_string(
        """
        x = {y: {y: 1}}
        x = ${x.y}
        """
    )
    assert config.get("x.y") == 1
    assert set(config.get("x").keys()) == set(['y'])

def test_self_ref_substitution_dict_recurse(self):
    with pytest.raises(ConfigSubstitutionException):
        ConfigFactory.parse_string(
            """
            x = ${x}
            """
        )

def test_self_ref_substitution_dict_recurse2(self):
    with pytest.raises(ConfigSubstitutionException):
        ConfigFactory.parse_string(
            """
            x = ${x}
            x = ${x}
            """
        )
expected_result =", "b: { c = 5 } } a.b { c", "def test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example from HOCON spec ''' config =", "in config_tree['foo'] and config_tree['foo']['s'] == 3 def test_attr_syntax(self): config =", "config['b'] == period(weeks=10) def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self): config = ConfigFactory.parse_string( \"\"\" a:", "\"\"\") assert 'b' not in config assert config['d'] == 4", "line\" \"second\" line \\\"\\\"\\\" } j = [1, 2, 3]", "= abc x = ${?x} def ''' config = ConfigFactory.parse_string(", "milli', timedelta(milliseconds=113)), ('a: 114 ms', timedelta(milliseconds=114)), ('a: 110 nanoseconds', period(microseconds=0)),", "seconds', period(seconds=4)), ('a: 5 second', period(seconds=5)), ('a: 6 s', period(seconds=6)),", "config[\"application.large-jvm-opts2\"] == [ '-Xm16g', '128mm', '-XX:+UseParNewGC', ] def test_substitution_list_with_append_substitution(self): config", "= ConfigFactory.parse_string( \"\"\" b = [5, 6] a: [ ${b}", "foo.d = 4 \"\"\" ) assert config.get(\"bar\") == {'a': 4,", ") assert config.get('host_modules') == ['php', 'python', 'java'] config2 = ConfigFactory.parse_string(", "assert config1.get('f') == 'str ' config2 = ConfigFactory.parse_string( \"\"\" {", "{ us { name = \"second domain\" } } \"\"\"", "\\ python \"\"\" ) assert [x.strip() for x in config['common_modules'].split()", "config2.get('a.b.c') == 'str' assert config2.get('d') == 'test str' assert config2.get('f')", ") assert config['a']['x'] == 42 assert config['a']['y'] == 42 def", "'Homer\\'s favorite coffee' in names assert 'milk' in types def", "3mo', relativedelta(months=3)), ('a: 3 mon', '3 mon'), ('a: 1 years',", "should end up as 3 foo : { c :", "${x.y} \"\"\" ) def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( \"\"\" x", "} \"\"\" ) assert config3.get('a.b.c') == 5 assert config3.get('d') ==", "foo : { a : 1 b : 2 }", "= \"first domain\" } } www.example-ö.com { us { name", ") assert config.get_int('o1.foo.b') == 2 assert config.get_int('o2.foo.b') == 3 assert", "= ${non_existent} abc \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules", "${e} } } d = test ${a.b.c} me e =", "common_modules = [php, python] host_modules = [java] ${common_modules} [perl] \"\"\"", "\"\"\" ) def test_resolve_package_path(self): path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path) def", "('a: 1years', relativedelta(years=1)), ('a: 2 year', relativedelta(years=2)), ('a: 3 y',", "assert config.get('a') is None assert config.get('b')[0] is None def test_parse_override(self):", "assert config3.get('host_modules') == ['java', 'php', 'python', 'perl'] config4 = ConfigFactory.parse_string(", "} b = [${b1}] \"\"\", resolve=False ) config2 = ConfigFactory.parse_string(", "assert config['t.d.c'] == 5 assert config['k.\"b.f.d\"'] == 7 def test_dotted_notation_merge(self):", "types def test_list_of_dicts(self): config = ConfigFactory.parse_string( \"\"\" a: [ {a:", "1 c: 2 } \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\"", "def test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string(\"foo = bar\") assert config['foo'] ==", "== [ '-Xms128m', '-Xmx128m' ] def test_int_substitutions(self): config1 = ConfigFactory.parse_string(", "as fdin: fdin.write('[1, 2]') fdin.flush() config1 = ConfigFactory.parse_string( \"\"\" a:", ") assert config3.get('a.b.c') == 5 assert config3.get('d') 
== 'test 5", "= ConfigFactory.parse_string( \"\"\" a = 45 b = ${?c} d", "favorite coffee' in names assert 'milk' in types def test_list_of_dicts(self):", "negative = -15 \"\"\" ) # on python 3 long", "== 'east' assert config4.get('data-center-east-prod.cluster-size') == 6 assert config4.get('data-center-east-prod.tmpDir') == '/tmp'", "d', period(days=12)), ('a: 110 microseconds', period(microseconds=110)), ('a: 111 microsecond', period(microseconds=111)),", "{ x : 42 } } \"\"\" ) assert config['a']['x']", "ABOVE SETTINGS!\"\"\") assert config_tree == { 'foo': '1', 'bar': '2'", "\"\"\" common_modules = [perl] host_modules = ${common_modules} 55 \"\"\" )", "config = ConfigFactory.parse_string( \"\"\" a = null b = [null]", "d = ${a.b.c} } \"\"\" ) assert config1.get('a.b.c') == 5", "5.0\") assert config['foo'] == 5.0 def test_list_substitutions(self): config = ConfigFactory.parse_string(", "{z: 1}} x = ${x.y} \"\"\" ) assert config.get(\"x.y\") ==", "c = str e = \"str \" } } d", "'data-center-east': {'cluster-size': 8, 'name': 'east'}, 'misc': 'mist', 'default-jvm-opts': ['-XX:+UseParNewGC'], 'large-jvm-opts':", "${common_modules} bb \"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules =", "121.22E-1 assert config['long2'] == 121.22E3423432 assert config['neg_long2'] == 121.22E-3 def", "ConfigFactory.parse_string( \"\"\" test = line1 \\ line2 test2 = test", "{ host = ${?DB_HOST} } database { host = \"other.host.net\"", "} data-center-east = {name = \"east\"} ${data-center-generic} { cluster-size =", "{ c = str e = \"str \" } }", "= ${?user} pass = ${?pass} } \"\"\") assert config['database.name'] ==", "4]} assert two == 2 assert three == 3 def", "resolve=False ) config6 = config4.with_fallback(config5) assert config6 == { 'longName':", "config = ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules =", "\"c:\\\\temp\" } \"\"\" expected = { 'plain-backslash': '\\\\', 'tab': '\\t',", "'baz': 42} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example", "ConfigFactory.parse_string( \"\"\" a = 121.22 b = -121.22 c =", "config1['a'] == [1, 2] config2 = ConfigFactory.parse_string( \"\"\" a: [", "def test_object_concat(self): config = ConfigFactory.parse_string( \"\"\"o1 = { foo :", "config = ConfigFactory.parse_URL(\"https://nosuchurl\") assert config == [] def test_include_dict_from_samples(self): config", "'hey dude!' 
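    # Illustrative sketch, not part of the original suite: besides plain
    # item access, ConfigTree exposes typed getters such as get_int and
    # get_string, which the tests above and below rely on.
    def test_sketch_typed_getters(self):
        config = ConfigFactory.parse_string('a = 5\nb = "5"')
        assert config.get_int('a') == 5
        assert config.get_string('b') == '5'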
    def test_concat_string(self):
        config = ConfigFactory.parse_string(
            """
            a = a b c
            b = 5 b
            c = b 7
            """
        )
        assert config.get('a') == 'a b c'
        assert config.get('b') == '5 b'
        assert config.get('c') == 'b 7'

    def test_concat_dict(self):
        config = ConfigFactory.parse_string(
            """
            a: {b: 1}
            a: {c: 2}
            b: {c: 3} {d: 4} {
                c: 5
            }
            """
        )
        assert config.get('a.b') == 1
        assert config.get('a.c') == 2
        assert config.get('b.c') == 5
        assert config.get('b.d') == 4

    def test_bad_concat(self):
        ConfigFactory.parse_string('a = 45\n')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = [4] "4"')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = "4" [5]')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = {b: 5} "4"')

    def test_self_append_string(self):
        '''
        Should be equivalent to
        x = abc
        x = ${?x} def
        '''
        config = ConfigFactory.parse_string(
            """
            x = abc
            x += def
            """
        )
        assert config.get("x") == "abc def"

    def test_self_append_non_existent_string(self):
        '''
        Should be equivalent to
        x = ${?x} def
        '''
        config = ConfigFactory.parse_string(
            """
            x += def
            """
        )
        assert config.get("x") == " def"

    def test_self_append_array(self):
        config = ConfigFactory.parse_string(
            """
            x = [1,2]
            x += [3,4]
            """
        )
        assert config.get("x") == [1, 2, 3, 4]

    def test_self_append_nonexistent_array(self):
        config = ConfigFactory.parse_string(
            """
            x += [1,2]
            """
        )
        assert config.get("x") == [1, 2]

    def test_self_append_object(self):
        config = ConfigFactory.parse_string(
            """
            x = {a: 1}
            x += {b: 2}
            """
        )
        assert config.get("x") == {'a': 1, 'b': 2}

    def test_self_append_nonexistent_object(self):
        config = ConfigFactory.parse_string(
            """
            x += {a: 1}
            """
        )
        assert config.get("x") == {'a': 1}
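    # Illustrative sketch, not part of the original suite: `+=` is sugar
    # for an optional self-referential substitution, so the two spellings
    # below are expected to resolve to the same value.
    def test_sketch_plus_equals_equivalence(self):
        sugar = ConfigFactory.parse_string('x = [1]\nx += [2]')
        explicit = ConfigFactory.parse_string('x = [1]\nx = ${?x} [2]')
        assert sugar.get('x') == explicit.get('x') == [1, 2]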
    def test_multiple_substitutions(self):
        config = ConfigFactory.parse_string(
            """
            a = 5
            b=${a}${a}
            c=${a} ${a}
            """
        )
        assert config == {
            'a': 5,
            'b': '55',
            'c': '5 5'
        }

    def test_optional_substitution(self):
        config = ConfigFactory.parse_string(
            """
            a = 45
            b = ${?c}
            d = ${?c} 4
            e = ${?a}
            g = ${?c1} ${?c2}
            h = ${?c1} ${?c2} 1
            """
        )
        assert 'b' not in config
        assert config['d'] == 4
        assert config['e'] == 45
        assert 'g' not in config
        assert config['h'] == 1

    def test_non_existent_substitution(self):
        with pytest.raises(ConfigSubstitutionException):
            ConfigFactory.parse_string(
                """
                common_modules = ${non_existent}
                """
            )
        with pytest.raises(ConfigSubstitutionException):
            ConfigFactory.parse_string(
                """
                common_modules = ${non_existent} abc
                """
            )

    def test_self_ref_substitution_array(self):
        config = ConfigFactory.parse_string(
            """
            x = [1,2]
            x = ${x} [3,4]
            x = [-1, 0] ${x} [5, 6]
            x = [-3, -2] ${x}
            """
        )
        assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6]

    def test_self_ref_substitution_dict_path(self):
        config = ConfigFactory.parse_string(
            """
            x = {y: {z: 1}}
            x = ${x.y}
            """
        )
        assert config.get("x.y") == {'z': 1}
        assert config.get("x.z") == 1
        assert set(config.get("x").keys()) == set(['y', 'z'])

    def test_self_ref_substitution_dict_path_hide(self):
        config = ConfigFactory.parse_string(
            """
            x = {y: {y: 1}}
            x = ${x.y}
            """
        )
        assert config.get("x.y") == 1
        assert set(config.get("x").keys()) == set(['y'])

    def test_self_ref_substitution_dict_recurse(self):
        with pytest.raises(ConfigSubstitutionException):
            ConfigFactory.parse_string(
                """
                x = ${x}
                """
            )

    def test_self_ref_substitution_dict_otherfield(self):
        '''
        Example from HOCON spec
        '''
        config = ConfigFactory.parse_string(
            """
            bar : {
                foo : 42,
                baz : ${bar.foo}
            }
            """
        )
        assert config.get("bar") == {'foo': 42, 'baz': 42}
        assert set(config.keys()) == set(['bar'])

    def test_self_ref_substitution_dict_otherfield_merged_in(self):
        '''
        Example from HOCON spec
        '''
        config = ConfigFactory.parse_string(
            """
            bar : {
                foo : 42,
                baz : ${bar.foo}
            }
            bar : { foo : 43 }
            """
        )
        assert config.get("bar") == {'foo': 43, 'baz': 43}
        assert set(config.keys()) == set(['bar'])

    def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self):
        '''
        Example from HOCON spec
        '''
        config = ConfigFactory.parse_string(
            """
            // bar.a should end up as 4
            bar : { a : ${foo.d}, b : 1 }
            bar.b = 3
            // foo.c should end up as 3
            foo : { c : ${bar.b}, d : 2 }
            foo.d = 4
            """
        )
        assert config.get("bar") == {'a': 4, 'b': 3}
        assert config.get("foo") == {'c': 3, 'd': 4}
        assert set(config.keys()) == set(['bar', 'foo'])
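    # Illustrative sketch, not part of the original suite: an optional
    # self-reference (${?x}) resolves to nothing on the first definition,
    # which is what lets it avoid the cycle that plain ${x} raises above.
    def test_sketch_optional_self_ref_breaks_cycle(self):
        config = ConfigFactory.parse_string('x = ${?x} [1]')
        assert config.get('x') == [1]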
    def test_fallback_self_ref_substitutions_append(self):
        config1 = ConfigFactory.parse_string(
            """
            list = [ 1, 2, 3 ]
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            list = ${list} [ 4, 5, 6 ]
            """,
            resolve=False
        )
        config2 = config2.with_fallback(config1)
        assert config2.get("list") == [1, 2, 3, 4, 5, 6]

    def test_fallback_self_ref_substitutions_append_plus_equals(self):
        config1 = ConfigFactory.parse_string(
            """
            list = [ 1, 2, 3 ]
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            list += [ 4, 5, 6 ]
            """,
            resolve=False
        )
        config2 = config2.with_fallback(config1)
        assert config2.get("list") == [1, 2, 3, 4, 5, 6]

    def test_fallback_self_ref_substitutions_merge(self):
        config1 = ConfigFactory.parse_string(
            """
            dict = { x: 1 }
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            dict = ${dict} { y: 2 }
            """,
            resolve=False
        )
        config2 = config2.with_fallback(config1)
        assert config2.get("dict") == {'x': 1, 'y': 2}

    def test_fallback_with_resolve(self):
        config3 = ConfigFactory.parse_string("c=5")
        config2 = ConfigFactory.parse_string("b=${c}", resolve=False)
        config1 = ConfigFactory.parse_string("a=${b}", resolve=False) \
            .with_fallback(config2, resolve=False) \
            .with_fallback(config3)
        assert {'a': 5, 'b': 5, 'c': 5} == config1

    def test_include_dict_from_samples(self):
        config = ConfigFactory.parse_file("samples/animals.conf")
        assert config.get('cat.garfield.say') == 'meow'
        assert config.get('dog.mutt.hates.garfield.say') == 'meow'

    def test_include_glob_dict_from_samples(self):
        config = ConfigFactory.parse_file("samples/all_animals.conf")
        assert config.get('animals.garfield.say') == 'meow'
        assert config.get('animals.mutt.hates.garfield.say') == 'meow'

    def test_include_missing_required_file(self):
        with pytest.raises(IOError):
            ConfigFactory.parse_string(
                """
                a: [
                    include required("dummy.txt")
                    3
                    4
                ]
                """
            )

    def test_include_package_file(self, monkeypatch):
        temp_dir = tempfile.mkdtemp()
        try:
            module_dir = os.path.join(temp_dir, 'my_module')
            module_conf = os.path.join(module_dir, 'my.conf')
            # create the module folder and necessary files (__init__ and config)
            os.mkdir(module_dir)
            open(os.path.join(module_dir, '__init__.py'), 'a').close()
            with open(module_conf, 'w') as fdin:
                fdin.write("{c: 3}")
            # add the temp dir to sys.path so that 'my_module' can be discovered
            monkeypatch.syspath_prepend(temp_dir)
            # load the config file from 'my_module'
            config = ConfigFactory.parse_string(
                """
                a: 1
                b: 2
                include package("my_module:my.conf")
                """
            )
            # check that the contents of both config files are available
            assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3}
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
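    # Illustrative sketch, not part of the original suite: a plain include
    # merges the referenced file's keys into the enclosing object, mirroring
    # the samples-based include tests above with a throwaway temp file.
    def test_sketch_include_temp_file(self):
        with tempfile.NamedTemporaryFile('w') as fdin:
            fdin.write('b = 2')
            fdin.flush()
            config = ConfigFactory.parse_string(
                """
                a = 1
                include "{tmp_file}"
                """.format(tmp_file=fdin.name)
            )
        assert config['a'] == 1
        assert config['b'] == 2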
    @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')
    def test_string_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            string_from_env = ${STRING_VAR}
            """
        )
        assert config == {'string_from_env': 'value_from_environment'}

    @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')
    def test_string_from_environment_self_ref(self):
        config = ConfigFactory.parse_string(
            """
            STRING_VAR = ${STRING_VAR}
            """
        )
        assert config == {'STRING_VAR': 'value_from_environment'}

    @mock.patch.dict(os.environ, INT_VAR='5')
    def test_int_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            int_from_env = ${INT_VAR}
            """
        )
        assert config == {'int_from_env': '5'}
        assert config.get_int('int_from_env') == 5

    @mock.patch.dict(os.environ, BOOL_VAR='false')
    def test_bool_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            bool_from_env = ${BOOL_VAR}
            """
        )
        assert config == {'bool_from_env': 'false'}
        assert config.get_bool('bool_from_env') is False

    def test_resolve_package_path(self):
        path = ConfigParser.resolve_package_path("pyhocon:config_parser.py")
        assert os.path.exists(path)

    def test_resolve_package_path_format(self):
        with pytest.raises(ValueError):
            ConfigParser.resolve_package_path("pyhocon/config_parser.py")

    def test_resolve_package_path_missing(self):
        with pytest.raises(ImportError):
            ConfigParser.resolve_package_path("non_existent_module:foo.py")

    @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&'])
    def test_fail_parse_forbidden_characters(self, forbidden_char):
        with pytest.raises(ParseBaseException):
            ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char))

    @pytest.mark.parametrize('forbidden_char', ['$', '"'])
    def test_fail_parse_forbidden_characters_in_context(self, forbidden_char):
        with pytest.raises(ParseException):
            ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char))

    @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&'])
    def test_parse_forbidden_characters_quoted(self, forbidden_char):
        value = "hey man{}".format(forbidden_char)
        config = ConfigFactory.parse_string('a: "{}"'.format(value))
        assert config.get_string("a") == value

    def test_invalid_assignment(self):
        with pytest.raises(ParseSyntaxException):
            ConfigFactory.parse_string('common_modules [perl]')
        with pytest.raises(ParseException):
            ConfigFactory.parse_string('common_modules {} {perl: 1}')
        with pytest.raises(ParseSyntaxException):
            ConfigFactory.parse_string(
                """
                a = {f: 5}
                common_modules ${a} {perl: 1}
                """
            )
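    # Illustrative sketch, not part of the original suite: with ${?VAR},
    # an unset environment variable leaves the earlier value in place
    # instead of raising, which is the usual env-override idiom.
    def test_sketch_optional_env_override_default(self):
        env_name = 'PYHOCON_SKETCH_UNSET_VAR'
        assert env_name not in os.environ
        config = ConfigFactory.parse_string(
            'host = "default.host"\nhost = ${?%s}' % env_name
        )
        assert config.get('host') == 'default.host'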
    def test_parse_URL_from_invalid(self):
        config = ConfigFactory.parse_URL("https://nosuchurl")
        assert config == []

    def test_parse_URL_from_samples(self):
        config = ConfigFactory.parse_URL("file:samples/aws.conf")
        assert config.get('data-center-generic.cluster-size') == 6
        assert config.get('large-jvm-opts') == ['-XX:+UseParNewGC', '-Xm16g']

    @pytest.mark.parametrize('data_set', [
        ('a: 1 minutes', period(minutes=1)),
        ('a: 1minutes', period(minutes=1)),
        ('a: 2 minute', period(minutes=2)),
        ('a: 3 m', period(minutes=3)),
        ('a: 3 min', '3 min'),
        ('a: 4 seconds', period(seconds=4)),
        ('a: 5 second', period(seconds=5)),
        ('a: 6 s', period(seconds=6)),
        ('a: 6 sec', '6 sec'),
        ('a: 8 hour', period(hours=8)),
        ('a: 9 h', period(hours=9)),
        ('a: 10 weeks', period(weeks=10)),
        ('a: 11 week', period(weeks=11)),
        ('a: 12 w', period(weeks=12)),
        ('a: 10 days', period(days=10)),
        ('a: 11 day', period(days=11)),
        ('a: 12 d', period(days=12)),
        ('a: 110 microseconds', period(microseconds=110)),
        ('a: 111 microsecond', period(microseconds=111)),
        ('a: 112 micros', period(microseconds=112)),
        ('a: 113 micro', period(microseconds=113)),
        ('a: 114 us', period(microseconds=114)),
        ('a: 110 milliseconds', timedelta(milliseconds=110)),
        ('a: 111 millisecond', timedelta(milliseconds=111)),
        ('a: 112 millis', timedelta(milliseconds=112)),
        ('a: 113 milli', timedelta(milliseconds=113)),
        ('a: 114 ms', timedelta(milliseconds=114)),
        ('a: 110 nanoseconds', period(microseconds=0)),
        ('a: 11000 nanoseconds', period(microseconds=11)),
        ('a: 1110000 nanosecond', period(microseconds=1110)),
        ('a: 1120000 nanos', period(microseconds=1120)),
        ('a: 1130000 nano', period(microseconds=1130)),
    ])
    def test_parse_string_with_duration(self, data_set):
        config = ConfigFactory.parse_string(data_set[0])
        assert config['a'] == data_set[1]


try:
    from dateutil.relativedelta import relativedelta

    @pytest.mark.parametrize('data_set', [
        ('a: 1 months', relativedelta(months=1)),
        ('a: 1months', relativedelta(months=1)),
        ('a: 2 month', relativedelta(months=2)),
        ('a: 3 mo', relativedelta(months=3)),
        ('a: 3mo', relativedelta(months=3)),
        ('a: 3 mon', '3 mon'),
        ('a: 1 years', relativedelta(years=1)),
        ('a: 1years', relativedelta(years=1)),
        ('a: 2 year', relativedelta(years=2)),
        ('a: 3 y', relativedelta(years=3)),
        ('a: 3y', relativedelta(years=3)),
    ])
    def test_parse_string_with_duration_optional_units(data_set):
        config = ConfigFactory.parse_string(data_set[0])
        assert config['a'] == data_set[1]
except ImportError:
    pass
domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure', append=True)", "12321321837612378126213217321 assert isinstance(config['negative'], int) assert config['negative'] == -15 def test_assign_float(self):", "def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = [ 1,", "me' def test_multiple_substitutions(self): config = ConfigFactory.parse_string( \"\"\" a = 5", "'(((((') assert '<NAME>' in names assert 'Homer\\'s favorite coffee' in", "= ${Test.field2}\"3\" } \"\"\" ) assert config.get_string(\"A.field1\") == \"1\" assert", "= test ${a.b.c} } \"\"\" ) assert config2.get('a.b.c') == 5", "name = peopledb pass = <PASSWORD> } user=test_user pass=<PASSWORD> database", "[1, 2, 3, 4, 5, 6] assert config.get_list('a') == [1,", "= ConfigFactory.parse_string( \"\"\" application.foo = 128mm application.default-jvm-opts = [\"-XX:+UseParNewGC\"] application.large-jvm-opts", "assert config['a'] == 4 assert config['b'] == 5 assert config['c']", "= ConfigFactory.parse_string( \"\"\" bool_from_env = ${TRUE_OR_FALSE} \"\"\") assert config ==", "= ConfigFactory.parse_string( \"\"\" a: [ include url(\"file://{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name) )", "2, 3, 4, 5, 6] def test_self_merge_ref_substitutions_object(self): config1 = ConfigFactory.parse_string(", "assert config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as fdin:", "\\\\\\\\ \\\"\\\"\\\" with-newline-escape-sequence: \\\"\\\"\\\" \\\\n \\\"\\\"\\\" with-escaped-newline-escape-sequence: \\\"\\\"\\\" \\\\\\\\n \\\"\\\"\\\"", "\"b.f.d\": 7 } \"\"\" ) assert config['\"a.b.c.d\"'] == 3 assert", "1 assert set(config.get(\"x\").keys()) == set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string(", "\"d\",] c = [ \"e\", \"f\" ] \"\"\" ) assert", "1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self): ''' Example from", "\"\"\" bar : { foo : 42, baz : ${bar.foo}", "= ${a.d} \"\"\" ) assert config.get(\"a\") == {'b': 3, 'c':", "config.get('b') == '5 b' assert config.get('c') == 'b 7' def", "c d') == 'test2' assert config.get('a b c d e')", "} def test_fallback_self_ref_substitutions_append(self): config1 = ConfigFactory.parse_string( \"\"\" list = [", "'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir, ignore_errors=True) def test_include_dict(self): expected_res", "'6 sec'), ('a: 7 hours', period(hours=7)), ('a: 8 hour', period(hours=8)),", "(ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from dateutil.relativedelta import relativedelta as period", "config = ConfigFactory.parse_string( \"\"\" a: foo b: 10 weeks c:", "pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = ${common_modules} 55", "config1 = ConfigFactory.parse_string( \"\"\" a = { b: 1 c:", "ConfigFactory.parse_string( \"\"\" list = ${list} [ 4, 5, 6 ]", "ConfigFactory.parse_string( \"\"\" include-database=true \"\"\") assert config == { 'include-database': True", "110 microseconds', period(microseconds=110)), ('a: 111 microsecond', period(microseconds=111)), ('a: 112 micros',", "def test_non_compatible_substitution(self): with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules", "assert config.get_string(\"Test.field2\") == \"12\" assert config.get_string(\"Test.field3\") == 
\"123\" def test_one_line_quote_escape(self):", "'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4}", "== 5 assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f') == 7", "= 7 h.i { d: 5 } h.i { e:65", "assert config.get('d') == 'test 7 me' def test_multiple_substitutions(self): config =", "${x} {v2: 3} b += [${b2}] \"\"\", resolve=False ) merged", "import OrderedDict from datetime import timedelta from pyparsing import ParseBaseException,", "2] [3, 4] ${b} [1, 2] ${b} [7, 8] ]", "# comment 7 2, # comment 8 3, # comment", "pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self): with tempfile.NamedTemporaryFile('w') as fdin:", "set(config.keys()) == set(['bar', 'foo']) def test_self_ref_substitution_string_opt_concat(self): ''' Example from HOCON", "def test_quoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" no_trailing_ws = \"foo\" \"bar", "{'a': 1, 'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir, ignore_errors=True) def", "5 config2 = ConfigFactory.parse_string( \"\"\" { a: { b: {", "6] def test_bad_concat(self): ConfigFactory.parse_string('a = 45\\n') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a =", "'bar'}} def test_concat_multi_line_string(self): config = ConfigFactory.parse_string( \"\"\" common_modules = perl", "'{\"critical\":\"0.00\",\"warning\":\"99.99\"}' == config['value'] def test_keys_with_slash(self): config = ConfigFactory.parse_string( \"\"\" /abc/cde1:", "config1.get('d') == 'str' assert config1.get('f') == 'str ' config2 =", "== config2 def test_include_missing_required_file(self): with pytest.raises(IOError): ConfigFactory.parse_string( \"\"\" a: [", "assert set(config.keys()) == set(['bar', 'foo']) def test_self_ref_substitution_string_opt_concat(self): ''' Example from", "== [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert config[\"application.large-jvm-opts2\"] == [", "\"\"\" ) assert config.get_int('o1.foo.b') == 2 assert config.get_int('o2.foo.b') == 3", "name = \"east\" } \"\"\" ) assert config5['data-center-east'] == {", "\"\"\" x = [1,2] x = ${x} [3,4] x =", "isinstance(config['negative'], int) assert config['negative'] == -15 def test_assign_float(self): config =", "ConfigFactory.parse_string( \"\"\" list = [ 1, 2, 3 ] \"\"\"", "8000 url = ${database.host}\":\"${database.port} } database { host = ${?DB_HOST}", "config3.get('d') == 'test str me' assert config3.get('f') == 'test str", "test_fallback_self_ref_substitutions_concat_string(self): config1 = ConfigFactory.parse_string( \"\"\" string = abc \"\"\" )", "${bar} foo: ${baz} bar: {r: 1, s: 2} baz: {s:", "== {'e': 5, 'f': 6} def test_substitutions_overwrite(self): config1 = ConfigFactory.parse_string(", "\"foo bar \" } def test_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\"", "merged.get(\"c.d\") == [1] def test_self_merge_ref_substitutions_object2(self): config1 = ConfigFactory.parse_string( \"\"\" x", "name = peopledb pass = <PASSWORD> name = ${?NOT_EXISTS} pass", "1130000 nano', period(microseconds=1130)), ('a: 1140000 ns', period(microseconds=1140)), ]) def test_parse_string_with_duration(self,", "def test_resolve_package_path(self): path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path) def test_resolve_package_path_format(self): with", "ConfigFactory.parse_string( \"\"\" STRING_VAR = ${STRING_VAR} \"\"\") assert config == {", "\"\"\", resolve=False ) result = config2.with_fallback(config1) assert result.get(\"string\") == 
'abcdef'", "\"\"\" ) assert config.get(\"x\") == \"abc def\" def test_self_append_non_existent_string(self): '''", "\"cde\" /abc/cde3: \"fgh\" \"\"\") assert 'abc' == config['/abc/cde1'] assert 'cde'", "4 b = # test # test2 5 c =", "== set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x =", "c=the man, d=woof, a-b-c-d=test, a b c d=test2, \"a b", "} } k { \"b.f.d\": 7 } \"\"\" ) assert", ") def test_self_ref_substitution_dict_recurse2(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x}", "\"\"\" ) assert config.get(\"x\") == {'a': 1, 'b': 2} def", "\"\"\" no_trailing_ws = \"foo\" \"bar \" trailing_ws = \"foo\" \"bar", "'\\\\r', 'windows': 'c:\\\\temp', } config = ConfigFactory.parse_string(source) assert config ==", "== 'abcdef' # test no mutation on config1 assert result", "expected_res config2 = ConfigFactory.parse_string( \"\"\" a: {{ c: 3 d:", "d['apple'] = 4 d['pear'] = 1 d['tree'] = { 'a':", "${?a} \"\"\", resolve=False) source = ConfigFactory.parse_string( \"\"\" b: 14 \"\"\")", "/abc/cde1: abc \"/abc/cde2\": \"cde\" /abc/cde3: \"fgh\" \"\"\") assert 'abc' ==", "[3, 4]} assert two == 2 assert three == 3", "resolve=False) source = ConfigFactory.parse_string( \"\"\" b: 14 \"\"\") config =", "no mutation on config1 assert result is not config1 #", "\"\"\" ) assert config2.get('host_modules') == ['java', 'php', 'python'] config3 =", "assert config2.get('data-center-east.cluster-size') == 6 assert config2.get('data-center-east.name') == 'east' config3 =", "and config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'), 'a').close() with open(module_conf, 'w') as", "test_pop(self): config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}') assert 3 ==", "len(b) == 1 assert b[0] == {\"v1\": 2, \"v2\": 3}", "config.get('b.c') == 5 assert config.get('b.d') == 4 def test_concat_string(self): config", "def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} \"\"\"", "'test' assert config.get('g') == '6 test' assert config.get('a.b') == 'test'", "def test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( \"\"\" a=1, b=\"abc\", c=the man,", "assert config.get(\"x.z\") == 1 assert set(config.get(\"x\").keys()) == set(['y', 'z']) def", "ConfigFactory.parse_string( \"\"\" a.b = 4 a.d = 3 \"\"\" )", "test_concat_list(self): config = ConfigFactory.parse_string( \"\"\" a = [1, 2] [3,", "config = ConfigFactory.parse_string( \"\"\" num = 3 retries_msg = You", "'newline': '\\n', 'no-newline': '\\\\n', 'cr': '\\r', 'no-cr': '\\\\r', 'windows': 'c:\\\\temp',", "== 4 assert config['b'] == 5 assert config['c'] == 6", "ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent} def \"\"\" ) def", "so show raise an exception with pytest.raises(ConfigMissingException): config.get('b') def test_parse_null(self):", "import json import os import shutil import tempfile from collections", "{v2: 2 } b = [${b1}] \"\"\", resolve=False ) config2", "'abc\\ntest\\n', 'b': [1, 2, 3] } config = ConfigFactory.from_dict(d) assert", "{a: 3, c: 4}, ] \"\"\" ) assert config['a'] ==", "config = ConfigFactory.parse_string( \"\"\" a = \"a\" b = #", ": ${a} { } \"\"\", resolve=False ) with pytest.raises(ConfigException): config.as_plain_ordered_dict()", "config.get_int('t.e.y.f') == 7 assert config.get('t.j') == [1, 2, 3] assert", "} d = ${a.b.c} f = ${a.b.e} } 
\"\"\" )", "on config1 assert result is not config1 # test no", "('a: 9 h', period(hours=9)), ('a: 10 weeks', period(weeks=10)), ('a: 11", "('a: 113 micro', period(microseconds=113)), ('a: 114 us', period(microseconds=114)), ('a: 110", "test_optional_with_merge(self): unresolved = ConfigFactory.parse_string( \"\"\" foo: 42 foo: ${?a} \"\"\",", "[] def test_include_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/animals.conf\") assert config.get('cat.garfield.say') == 'meow'", "config_tree def test_merge_overriden(self): # Adress issue #110 # ConfigValues must", "= ConfigFactory.parse_string( \"\"\" \"a.b.c.d\": 3 t { \"d\": { \"c\":", "config['long2'] == 121.22E3423432 assert config['neg_long2'] == 121.22E-3 def test_assign_strings_with_eol(self): config", "${pass} } \"\"\") assert config['database.user'] == 'test_user' assert config['database.pass'] ==", "a: foo b: [a, 1, 10 weeks, 5 minutes,] c:", "\"first line\" \"second\" line \\\"\\\"\\\" } j = [1, 2,", "${baz} bar: {r: 1, s: 2} baz: {s: 3, t:", "== {'b': 3, 'c': [1, 2], 'd': {'foo': 'bar'}} def", "= ConfigFactory.parse_string( \"\"\" database { name = peopledb pass =", "config.as_plain_ordered_dict() def test_quoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" no_trailing_ws = \"foo\"", "{ data-center-generic = { cluster-size: 8 } misc = \"mist\"", "fdin: fdin.write('y = ${x}') fdin.flush() config = ConfigFactory.parse_string( \"\"\" include", "${host_modules} [c, go] \"\"\" ) assert config4.get('common_modules') == ['php', 'python']", "('a: 3 mo', relativedelta(months=3)), ('a: 3mo', relativedelta(months=3)), ('a: 3 mon',", "2 } } assert expected == config config2 = ConfigFactory.parse_string(", "'string_from_env': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref(self): config = ConfigFactory.parse_string(", "5 g } \"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def", "b: 2, } b # test # test2 { c:", "} assert config.get_int('int_from_env') == 5 def test_unicode_dict_key(self): input_string = u\"\"\"", "config = ConfigFactory.parse_string( \"\"\" STRING_VAR = ${?STRING_VAR} \"\"\") assert config", "{ foo : { b : 3 c : 4", "{x: [3,4]} x = [${x}, 2, 3] \"\"\" ) (one,", "= [ 1, 2, ] b = # test #", "} \"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a = {g}') def test_include_file(self): with", "ConfigFactory.parse_string( \"\"\" x += def \"\"\" ) assert config.get(\"x\") ==", "b = ${c} c = ${a} \"\"\") def test_assign_number_with_eol(self): config", "x = [1,2] x = ${x} [3,4] x = [-1,", "assert len(bars) == 10 names = {bar['name'] for bar in", "timedelta as period class TestConfigParser(object): def test_parse_simple_value(self): config = ConfigFactory.parse_string(", "ConfigFactory.from_dict(d) assert config == d def test_object_concat(self): config = ConfigFactory.parse_string(", "= [php, python] host_modules = ${common_modules} [java] \"\"\" ) assert", "{ 'a': { 'garfield': { 'say': 'meow' }, 't': 2", "\"\"\" ) assert config.get(\"x.y\") == 1 assert set(config.get(\"x\").keys()) == set(['y'])", "1 # comment 2 { c = test // comment", "114 us', period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a: 111 millisecond',", "{a: 1, b: 2}, {a: 3, c: 4}, ] \"\"\"", "'\"second\" line', ''] assert config.get_bool('t.d') is True assert config.get_int('t.e.y.f') ==", "def test_parse_override(self): config = 
ConfigFactory.parse_string( \"\"\" { a: { b:", "== '5' def test_dos_chars_with_int_noeol(self): config = ConfigFactory.parse_string(\"foo = 5\") assert", "== { 'string_from_env': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref(self): config", "def test_sci_real(self): \"\"\" Test scientific expression of number \"\"\" config", "from HOCON spec ''' config = ConfigFactory.parse_string( \"\"\" bar :", "test_dos_chars_with_triple_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = \"\"\"5\"\"\"') assert config['foo'] == '5'", "\"\"\" x += {a: 1} \"\"\" ) assert config.get(\"x\") ==", "\"1\" assert config.get_string(\"A.field2\") == \"12\" assert config.get_string(\"A.field3\") == \"123\" assert", "'abcdef' # test no mutation on config1 assert result is", "= ConfigFactory.parse_string( \"\"\" a.b = 3 a.b = ${a.b} a.b", "def test_substitution_override(self): config = ConfigFactory.parse_string( \"\"\" database { host =", "} \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" dict = ${dict}", ": { foo : 42, baz : ${bar.foo} } \"\"\"", "d = test ${a.b.c} me e = 7 } \"\"\"", "= ${string}def \"\"\", resolve=False ) result = config2.with_fallback(config1) assert result.get(\"string\")", "assert config2.get('f') == 'test str ' config3 = ConfigFactory.parse_string( u\"\"\"", "1 } \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" dict =", "('a: 6 sec', '6 sec'), ('a: 7 hours', period(hours=7)), ('a:", "comment 7 2, # comment 8 3, # comment 9", "assert config['negative'] == -15 def test_assign_float(self): config = ConfigFactory.parse_string( \"\"\"", "\"\"\" ) def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON spec '''", "test_int_substitutions(self): config1 = ConfigFactory.parse_string( \"\"\" { a: { b: {", "3, 'apple': 4, 'pear': 1, 'orange': 2, } config =", "'foo' } def test_fallback_substitutions_overwrite_file(self): config1 = ConfigFactory.parse_string( \"\"\" { data-center-generic", "baz : ${bar.foo} } \"\"\" ) assert config.get(\"bar\") == {'foo':", "== ['', '\"first line\"', '\"second\" line', ''] assert config.get_bool('t.d') is", "4 assert config['e'] == 45 assert 'g' not in config", "'false' } assert config.get_bool('bool_from_env') is False @mock.patch.dict(os.environ, INT_VAR='5') def test_int_from_environment(self):", "2 } \"\"\" ) expected = { 'a': { 'garfield':", "[php, python] host_modules = [java] ${common_modules} [perl] full_modules = ${host_modules}", "{'pa': [1], 'pb': [1]}, 'c': {}, 'd': {'pc': [1]}, 'e':", "= ConfigFactory.parse_string( u\"\"\" { a: { b: { c =", "assert config.get(\"bar\") == {'a': 4, 'b': 3} assert config.get(\"foo\") ==", "assert config.get_int('t.c') == 5 assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f')", "== 7 assert config.get('d') == 'test 7 me' def test_multiple_substitutions(self):", "php languages = [java, ${main_language}] \"\"\" ) assert config.get('languages') ==", "b : 2 } } o2 = { foo :", "== {'a': 1, 'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir, ignore_errors=True)", "'east', 'cluster-size': 6 } config6 = ConfigFactory.parse_string( \"\"\" data-center-generic =", "4} \"\"\" ) assert config.get(\"x\") == {'a': 1, 'b': 2,", "test_invalid_dict(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a = { f: 5", "d = foo ${x} 43 \"\"\") assert config == {", "= ConfigFactory.parse_string( \"\"\" foo : { a : { c", "for v in config.get('t.e.y.i').split('\\n')] == ['', 
'\"first line\"', '\"second\" line',", "5 == config.b.pb def test_escape_quote(self): config = ConfigFactory.parse_string( \"\"\" quoted:", "fdin.flush() config = ConfigFactory.parse_string( \"\"\" include \"{tmp_file}\" x = 42", "def test_self_append_nonexistent_object(self): config = ConfigFactory.parse_string( \"\"\" x += {a: 1}", "as JSON strings, See: https://github.com/lightbend/config/blob/master/HOCON.md#unchanged-from-json \"\"\" source = r\"\"\" {", "= [ \"e\", \"f\" ] \"\"\" ) assert config['a'] ==", "== ['java', 'php', 'python', 'perl'] assert config4.get('full_modules') == ['java', 'php',", "def test_from_dict_with_dict(self): d = { 'banana': 3, 'apple': 4, 'pear':", "set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x}", "def test_list_of_dicts_with_merge(self): config = ConfigFactory.parse_string( \"\"\" b = {f: 4}", "'!', '@', '*', '&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value = \"hey", "v1: 1 } b = [${b1}] \"\"\", resolve=False ) config2", "application.foo = 128mm application.default-jvm-opts = [\"-XX:+UseParNewGC\"] application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g,", "[5, 6, 1, 2], [3, 4, 5, 6], [1, 2,", "\"\"\" ) assert config['common_modules'] == ['perl', 'java', 'python'] def test_concat_multi_line_dict(self):", "config = ConfigFactory.parse_string( \"\"\" a = 45 b = ${?c}", "= 123 a = ${?test} a = 5 \"\"\" )", "('a: 114 us', period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a: 111", "1 assert set(config.get(\"x\").keys()) == set(['y', 'z']) def test_self_ref_substitution_dict_path_hide(self): config =", "{ 'a': 'abc\\ntest\\n', 'b': [1, 2, 3] } config =", "== 1 assert set(config.get(\"x\").keys()) == set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException):", "root.get_config(\"mid\").with_fallback(root) assert config['a'] == 1 and config['b'] == 1 def", "${b.pa} } c: { } d: { pc: ${b.pa} }", "'abc\\n\\n', 'test_quotes': 'abc\\n\\n' } def test_multi_line_escape(self): config = ConfigFactory.parse_string( \"\"\"", "line', ''] assert config.get_bool('t.d') is True assert config.get_int('t.e.y.f') == 7", "Should be equivalent to x = ${?x} def ''' config", "os.path.join(temp_dir, 'my_module') module_conf = os.path.join(module_dir, 'my.conf') # create the module", ") # use unicode path here for regression testing https://github.com/chimpler/pyhocon/issues/44", ": ${x} {v2: 3} b += [${b2}] \"\"\", resolve=False )", "comment 4 b: test, # comment 5 } # comment", "\"\"\" dict = ${dict} { y: 2 } \"\"\", resolve=False", "'peopledb' assert config['database.pass'] == '<PASSWORD>' def test_optional_with_merge(self): unresolved = ConfigFactory.parse_string(", "} foo.d = 4 \"\"\" ) assert config.get(\"bar\") == {'a':", "test_issue_75(self): config = ConfigFactory.parse_string( \"\"\"base : { bar: [\"a\"] }", "} \"\"\" ) assert config.get_string('t.c') == '5' assert config.get_int('t.c') ==", "== '5' @pytest.mark.parametrize('data_set', [ ('a: 1 minutes', period(minutes=1)), ('a: 1minutes',", "\"b\"] assert config.get_list('sub2.baz') == [\"a\", \"b\"] def test_plain_ordered_dict(self): config =", "= [ ${compilerCommon} ${substrate-suite} ${compilerCommon} ${substrate-suite} ] b2 = [", "{ c: 3, d: 4,} c { e: 5, f:", "def test_invalid_assignment(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') 
with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {}", "python \"\"\" ) assert [x.strip() for x in config['common_modules'].split() if", "== config.b.pb def test_escape_quote(self): config = ConfigFactory.parse_string( \"\"\" quoted: \"abc\\\\\"test\"", "'b': 2, 'f': 4}, {'a': 3, 'c': 4, 'f': 4},", "8 } } \"\"\" ) assert config.get('a.b.c') == 7 assert", "\"\"\" ) assert config1['a'] == [3, 4] def test_include_required_file(self): config", "\"\"\" ) assert config3.get('common_modules') == ['php', 'python'] assert config3.get('host_modules') ==", "abc \"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" string = ${string}def", "{ cluster-size = 6 } data-center-east = ${data-center-generic} {name =", "{ e:65 } } \"\"\") expected_result = { \"a\": {", ": ${bar.foo} } \"\"\" ) assert config.get(\"bar\") == {'foo': 42,", "= \"hey man{}\".format(forbidden_char) config = ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert config.get_string(\"a\") ==", "} \"\"\" ) assert config.get('foo') == {'a': 2, 'c': 1}", "\"123\" def test_one_line_quote_escape(self): config = ConfigFactory.parse_string( \"\"\" test_no_quotes: abc\\\\n\\\\n test_quotes:", "[1, # comment 7 2, # comment 8 3, #", "[-3, -2, -1, 0, 1, 2, 3, 4, 5, 6]", "assert 'abc\"test' == config['quoted'] assert 'abc\"test' == config['unquoted'] def test_escape_quote_complex(self):", "ConfigFactory.parse_file(\"samples/animals.conf\") assert config.get('cat.garfield.say') == 'meow' assert config.get('dog.mutt.hates.garfield.say') == 'meow' def", "config.get(\"x\") == {'a': 1, 'b': 2, 'c': 3, 'z': 0,", "'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_animals.conf\") assert config.get('animals.garfield.say') == 'meow'", "7 def test_dotted_notation_merge(self): config = ConfigFactory.parse_string( \"\"\" a { b", "= ConfigFactory.parse_string( \"\"\" a = foo bar \"\"\") assert config", "ConfigFactory.parse_string( \"\"\" a = [1, 2] [3, 4] [ 5,", "== 8 def test_concat_dict(self): config = ConfigFactory.parse_string( \"\"\" a: {b:", "host_modules = [java] ${common_modules} [perl] \"\"\" ) assert config3.get('common_modules') ==", "\"\"\" ) assert config['a'] == [ [5, 6, 1, 2],", "a = foo \"bar\" dummy \"\"\") assert config == {", "x = {y: -1} ${x} {d: 4} \"\"\" ) assert", "5 assert config['k.\"b.f.d\"'] == 7 def test_dotted_notation_merge(self): config = ConfigFactory.parse_string(", "a long assert config['a'] == 121.22 assert config['b'] == -121.22", "config['a.b'] == \"foo\" assert config['a.c'] == \"foo foo\" assert config['a.d']", "= ConfigFactory.parse_string( \"\"\" bar : { foo : 42, baz", "bar test dummy', 'c': 'foo 5 bv', 'd': 'foo 5", "assert 5 == config_tree.pop('a.c', 5) expected = { 'a': {'d':", "assert config.get('t.e.y.f') == 7 assert config.get('t.e.y.g') == 'hey dude!' 
assert", "3} finally: shutil.rmtree(temp_dir, ignore_errors=True) def test_include_dict(self): expected_res = { 'a':", "config.get('a.b') == 'test' assert config.get_string('a.b') == 'test' assert config.get('t') ==", "\"\"\" a = ${?a}foo \"\"\" ) assert config.get(\"a\") == 'foo'", "2, 3] } config = ConfigFactory.from_dict(d) assert config == d", "url(\"file://{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name) ) assert config3['a'] == [1, 2] def", "\"long \"${?name} \"\"\", resolve=False ) config6 = config4.with_fallback(config5) assert config6", "period(weeks=11)), ('a: 12 w', period(weeks=12)), ('a: 10 days', period(days=10)), ('a:", "[java] ${common_modules} \"\"\" ) assert config2.get('host_modules') == ['java', 'php', 'python']", "a: [ ${b} [1, 2] [3, 4] ${b} [1, 2]", "def test_include_substitution2(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{ x : 10,", "STRING_VAR='value_from_environment') def test_string_from_environment(self): config = ConfigFactory.parse_string( \"\"\" string_from_env = ${STRING_VAR}", "test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string(\"foo = bar\") assert config['foo'] == 'bar'", "field2 = ${Test.field1}\"2\" field3 = ${Test.field2}\"3\" } \"\"\" ) assert", "== [ '-Xm16g', '128mm', '-XX:+UseParNewGC' ] def test_non_existent_substitution(self): with pytest.raises(ConfigSubstitutionException):", "hey man i = \\\"\\\"\\\" \"first line\" \"second\" line \\\"\\\"\\\"", "'d': 4} assert set(config.keys()) == set(['bar', 'foo']) def test_self_ref_substitution_string_opt_concat(self): '''", "\"c\": 5 } } k { \"b.f.d\": 7 } \"\"\"", "} def test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string(\"foo = bar\") assert config['foo']", "'5' def test_dos_chars_with_int_noeol(self): config = ConfigFactory.parse_string(\"foo = 5\") assert config['foo']", "pytest.raises(ConfigException): config.as_plain_ordered_dict() def test_quoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" no_trailing_ws =", "config6['data-center-east'] == { 'name': 'east', 'cluster-size': 6 } def test_dos_chars_with_unquoted_string_noeol(self):", "== 'test 7 me' def test_multiple_substitutions(self): config = ConfigFactory.parse_string( \"\"\"", "1 assert config.get('b') == 'abc' assert config.get('c') == 'the man'", "test ${a.b.e} } \"\"\" ) assert config2.get('a.b.c') == 'str' assert", "3] } config = ConfigFactory.from_dict(d) assert config == d def", "from 'my_module' config = ConfigFactory.parse_string( \"\"\" a: 1 b: 2", "'right' def test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings are in the same", "65 } } } } assert expected_result == config def", "config3 = ConfigFactory.parse_string( \"\"\" a: [ include url(\"file://{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name)", "\"v2\": 3} b = merged.get(\"b\") assert len(b) == 1 assert", "from datetime import timedelta as period class TestConfigParser(object): def test_parse_simple_value(self):", "def test_object_field_substitution(self): config = ConfigFactory.parse_string( \"\"\" A = ${Test} Test", "2] config3 = ConfigFactory.parse_string( \"\"\" a: [ include url(\"file://{tmp_file}\") ]", "import (ConfigException, ConfigMissingException, ConfigWrongTypeException) try: from dateutil.relativedelta import relativedelta as", "assert isinstance(config['negative'], int) assert config['negative'] == -15 def test_assign_float(self): config", "config1.get('a.b.c') == 5 assert config1.get('d') == 5 config2 = ConfigFactory.parse_string(", 
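    # Illustrative sketch, not taken from the original suite: it combines the
    # substitution and string-concatenation behaviours exercised above, using
    # the ${database.host}":"${database.port} pattern that appears elsewhere
    # in this file. The test name itself is hypothetical.
    def test_usage_sketch_url_concatenation(self):
        config = ConfigFactory.parse_string(
            """
            database {
                host = "localhost"
                port = 8000
                url = ${database.host}":"${database.port}
            }
            """
        )
        # the integer port is stringified when concatenated into a string
        assert config.get_string('database.url') == 'localhost:8000'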
"config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts') == '-Xmx4g' config4 = ConfigFactory.parse_string(", "'d'] assert config['c'] == ['e', 'f'] def test_assign_dict_strings_with_equal_sign_with_eol(self): config =", "${TRUE_OR_FALSE} \"\"\") assert config == { 'bool_from_env': 'false' } assert", "\"\"\" ) assert config == { 'a': 5, 'b': '55',", "4 d['pear'] = 1 d['orange'] = 2 config = ConfigFactory.from_dict(d)", ") def test_self_ref_substitution_object(self): config = ConfigFactory.parse_string( \"\"\" x = {a:", "both config files are available assert dict(config.as_plain_ordered_dict()) == {'a': 1,", "ConfigFactory.parse_string( \"\"\" common_modules = {a:perl} \\ {b:java} \\ {c:python} \"\"\"", "= [1,2] x = ${x} [3,4] x = [-1, 0]", "'b': [1, 2, 3] } config = ConfigFactory.from_dict(d) assert config", "5' config3 = ConfigFactory.parse_string( \"\"\" { a: { b: {", "common_modules = ${non_existent} abc \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "bool_from_env = ${TRUE_OR_FALSE} \"\"\") assert config == { 'bool_from_env': 'false'", "== \"foo foo\" assert config['a.d'] == \"baz\" def test_comma_to_separate_expr(self): config", "datetime import timedelta as period class TestConfigParser(object): def test_parse_simple_value(self): config", "include \"{tmp_file}\" }} \"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == expected_res config3", "5 \"\"\" ) # b is not set so show", "${c} { pa: [${a}] pb: ${b.pa} } c: { }", "ConfigFactory.parse_string( \"\"\" a: {b: 1} a: {c: 2} b: {c:", "assert 5 == config.b.pb def test_escape_quote(self): config = ConfigFactory.parse_string( \"\"\"", "monkeypatch.syspath_prepend(temp_dir) # load the config and include the other config", "4} \"\"\") assert 'r' in config_tree['foo'] and 't' in config_tree['foo']", "c = 5 } } d = ${a.b.c} } \"\"\"", "= ConfigFactory.parse_string( \"\"\" a = 4 b = # test", "} def test_pop(self): config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}') assert", "test_include_dict(self): expected_res = { 'a': 1, 'b': 2, 'c': 3,", "4] ${b} [1, 2] ${b} [7, 8] ] \"\"\" )", "test_from_dict_with_dict(self): d = { 'banana': 3, 'apple': 4, 'pear': 1,", "include required(file(\"samples/animals.d/cat.conf\")) t = 2 } \"\"\" ) assert expected", "config.get('d') == 'woof' assert config.get('a-b-c-d') == 'test' assert config.get('a b", "\"\"\" ) assert config['a'] == 4 assert config['b'] == 5", "2 include package(\"my_module:my.conf\") \"\"\" ) # check that the contents", "num = 3 retries_msg = You have ${num} retries retries_msg", "assert isinstance(config['short'], int) assert config['long'] == 12321321837612378126213217321 assert isinstance(config['negative'], int)", "config1.get('a.b.c') == 'str' assert config1.get('d') == 'str' assert config1.get('f') ==", "1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config = ConfigFactory.parse_string(\"[1, 2, 3]\")", "3}\") # add the temp dir to sys.path so that", "retries' } def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" a =", "== 'test' assert config.get('a b c d') == 'test2' assert", "[1, 2, 3] assert config.get('t.u') == '192.168.1.3/32' assert config.get_int('t.g') is", "b = # test # test2 5 c = 6", "\"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" a.b = 4 a.d", "1, 'c': 2, 'd': 3 } config4 = ConfigFactory.parse_string( \"\"\"", "None def test_parse_override(self): config = 
ConfigFactory.parse_string( \"\"\" { a: {", "test_list_of_lists(self): config = ConfigFactory.parse_string( \"\"\" a: [ [1, 2] [3,", "== config_tree def test_merge_overriden(self): # Adress issue #110 # ConfigValues", "coffee' in names assert 'milk' in types def test_list_of_dicts(self): config", "1years', relativedelta(years=1)), ('a: 2 year', relativedelta(years=2)), ('a: 3 y', relativedelta(years=3)),", "b: { pb: 5 } \"\"\") assert 5 == config.b.pb", "'\"first line\"', '\"second\" line', ''] assert config.get_bool('t.d') is True assert", "4, 5, 6] assert config.get_list('a') == [1, 2, 3, 4,", "6 assert config.get('large-jvm-opts') == ['-XX:+UseParNewGC', '-Xm16g'] def test_parse_URL_from_invalid(self): config =", "long2 = 121.22e3423432 neg_long2 = 121.22e-3 \"\"\" ) # on", "test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( \"\"\" a: foo b: 10 weeks", "test_with_comment_on_last_line(self): # Adress issue #102 config_tree = ConfigFactory.parse_string(\"\"\" foo: \"1\"", "\"\"\" ) assert config.get_string('a.b') == '5' @pytest.mark.parametrize('data_set', [ ('a: 1", "expression of number \"\"\" config = ConfigFactory.parse_string( \"\"\" short =", "${a.c} a.d = {foo: bar} a.d = ${a.d} \"\"\" )", "config.get('g') == '6 test' assert config.get('a.b') == 'test' assert config.get_string('a.b')", "[perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}') with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string(", "no_trailing_ws = \"foo\" \"bar \" trailing_ws = \"foo\" \"bar \"{ws}", "1 d['tree'] = { 'a': 'abc\\ntest\\n', 'b': [1, 2, 3]", "} } a.b { c = 7 d = 8", "period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self): config = ConfigFactory.parse_string(\"[1, 2, 3]\") assert", "= ${?c1} ${?c2} 1 \"\"\") assert 'b' not in config", "4] def test_include_required_file(self): config = ConfigFactory.parse_string( \"\"\" a { include", "ConfigFactory.parse_string( \"\"\" x = {y: {y: 1}} x = ${x.y}", "4} ] def test_list_of_lists(self): config = ConfigFactory.parse_string( \"\"\" a: [", "ConfigFactory.parse_string('a = {b: 5} \"4\"') def test_string_substitutions(self): config1 = ConfigFactory.parse_string(", "with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent} abc \"\"\" )", "= ${a.b.c} } \"\"\" ) assert config1.get('a.b.c') == 5 assert", "== { 'bool_from_env': 'false' } assert config.get_bool('bool_from_env') is False @mock.patch.dict(os.environ,", "['java', 'php', 'python', 'perl'] config4 = ConfigFactory.parse_string( \"\"\" common_modules =", "with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = ${common_modules}", "\"\"\" a { a: 1, b: 2, } b #", "test_self_append_nonexistent_array(self): config = ConfigFactory.parse_string( \"\"\" x += [1,2] \"\"\" )", "with be a long assert config['short'] == 12.12321 assert config['long1']", "== 'peopledb' assert config['database.pass'] == '<PASSWORD>' def test_optional_with_merge(self): unresolved =", "} def test_quoted_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" a = foo", "2 assert three == 3 def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string(", "} a.c = ${a.b}\" \"${a.b} a.d = baz \"\"\" )", "\\\\\\\\n \\\"\\\"\\\" \"\"\" ) assert config['with-escaped-backslash'] == '\\n\\\\\\\\\\n' assert config['with-newline-escape-sequence']", 
"set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in(self): ''' Example from HOCON spec", "domain\" } } www.example-ö.com { us { name = \"second", "so that 'my_module' can be discovered monkeypatch.syspath_prepend(temp_dir) # load the", "\"east\" } \"\"\" ) assert config5['data-center-east'] == { 'name': 'east',", "assert config.get('dog.mutt.hates.garfield.say') == 'meow' def test_include_glob_dict_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_animals.conf\") assert", "config['foo'] == 'bar' def test_dos_chars_with_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = \"5\"')", "== \"baz\" def test_comma_to_separate_expr(self): config = ConfigFactory.parse_string( \"\"\" a=1, b=\"abc\",", "4] def test_self_append_string(self): ''' Should be equivalent to x =", "2 long with be a long assert config['a'] == 121.22", "CHANGE ANY OF THE ABOVE SETTINGS!\"\"\") assert config_tree == {", "('a: 110 nanoseconds', period(microseconds=0)), ('a: 11000 nanoseconds', period(microseconds=11)), ('a: 1110000", "a : { c : 1 } } foo :", "{\"v1\": 2, \"v2\": 3} b = merged.get(\"b\") assert len(b) ==", "{ 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self): config =", "package(\"my_module:my.conf\") \"\"\" ) # check that the contents of both", "= 192.168.1.3/32 g = null } \"\"\" ) assert config.get_string('t.c')", "config = ConfigFactory.from_dict(d) assert config == d def test_object_concat(self): config", ": ${foo.a} foo : { a : 2 } \"\"\"", "ConfigFactory.parse_string( \"\"\" { a: { b: { c = ${e}", "'name': 'foo' } def test_fallback_substitutions_overwrite_file(self): config1 = ConfigFactory.parse_string( \"\"\" {", "2}, {'a': 3, 'c': 4} ] def test_list_of_lists(self): config =", "# Adress issue #102 config_tree = ConfigFactory.parse_string(\"\"\" foo: \"1\" bar:", "man' assert [v.strip() for v in config.get('t.e.y.i').split('\\n')] == ['', '\"first", "c : ${a} { d : [ ${b} ] }", "{f: 4} a: [ ${b} {a: 1, b: 2}, {a:", "ConfigFactory.parse_string( \"\"\" a: [ include url(\"file://{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name) ) assert", "config.get('foo') == {'a': 2, 'c': 1} assert set(config.keys()) == set(['foo'])", "= ConfigFactory.parse_string( \"\"\" no_trailing_ws = \"foo\" \"bar \" trailing_ws =", "= ConfigFactory.parse_string( \"\"\" x = [1,2] x += [3,4] \"\"\"", "3} {d: 4} { c: 5 } \"\"\" ) assert", "'^', '?', '!', '@', '*', '&']) def test_parse_forbidden_characters_quoted(self, forbidden_char): value", "a.d = ${a.d} \"\"\" ) assert config.get(\"a\") == {'b': 3,", "with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent} def \"\"\"", ") assert config['a'] == {'a': 1, 'b': 2} assert config['b']", "= ConfigFactory.parse_string( \"\"\" x = abc x += def \"\"\"", "= \"str \" } } d = test ${a.b.c} me", "${b} [7, 8] ] \"\"\" ) assert config['a'] == [", "2, 'd': 3 } config4 = ConfigFactory.parse_string( \"\"\" name: foo", "assert set(config.keys()) == set(['a']) def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "\"foo bar \", 'trailing_ws': \"foo bar \", 'trailing_ws_with_comment': \"foo bar", "'foo bar' } def test_quoted_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" a", "{'a': 1, 'b': 2} def test_self_append_nonexistent_object(self): config = 
ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string( \"\"\" x = ${x} \"\"\" ) def test_self_ref_substitution_dict_recurse2(self): with", "{foo: bar} a.d = ${a.d} \"\"\" ) assert config.get(\"a\") ==", "set(['y', 'z']) def test_self_ref_substitution_dict_path_hide(self): config = ConfigFactory.parse_string( \"\"\" x =", ") expected = { 'a': { 'garfield': { 'say': 'meow'", "assert config['with-newline-escape-sequence'] == '\\n\\\\n\\n' assert config['with-escaped-newline-escape-sequence'] == '\\n\\\\\\\\n\\n' def test_multiline_with_backslash(self):", "str' assert config2.get('f') == 'test str ' config3 = ConfigFactory.parse_string(", "main_language = php languages = [java, ${main_language}] \"\"\" ) assert", "{z: ${x}} \"\"\" ) assert config.get(\"x.x\") == [3, 4] assert", "\"localhost\" port = 8000 url = ${database.host}\":\"${database.port} } database {", "utf-8 -*- import json import os import shutil import tempfile", "in str(config2) def test_fallback_non_root(self): root = ConfigFactory.parse_string( \"\"\" a =", "== 3 assert config.get_int('o3.foo.b') == 3 assert config.get_int('o1.foo.c', default=42) ==", "}, \"d\": 4, \"k\": \"f d\" } } }, \"h\":", "${a.b.c} me e = 7 } \"\"\" ) assert config.get('a.b.c')", "3, 4] def test_self_append_string(self): ''' Should be equivalent to x", "config['b'] == ['c', 'd'] assert config['c'] == ['e', 'f'] def", "longName: \"long \"${?name} \"\"\", resolve=False ) config6 = config4.with_fallback(config5) assert", "= ConfigFactory.parse_string( \"\"\" a=1, b=\"abc\", c=the man, d=woof, a-b-c-d=test, a", "long with be a long assert config['short'] == 12 assert", "config.get_bool('bool_from_env') is False @mock.patch.dict(os.environ, INT_VAR='5') def test_int_from_environment(self): config = ConfigFactory.parse_string(", "1 assert config.get_int('o3.foo.c') == 4 def test_issue_75(self): config = ConfigFactory.parse_string(", "period(microseconds=1130)), ('a: 1140000 ns', period(microseconds=1140)), ]) def test_parse_string_with_duration(self, data_set): config", "config.get('a b c d') == 'test2' assert config.get('a b c", "config2 = ConfigFactory.parse_string( \"\"\" dict = ${dict} { y: 2", "len(b) == 2 assert b[0] == {'v2': 2} assert b[1]", "ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) b = merged.get(\"b\") assert len(b) == 2", "ConfigFactory.parse_string( \"\"\" no_trailing_ws = \"foo\" \"bar \" trailing_ws = \"foo\"", "\"\"\" ) assert config.get_string(\"A.field1\") == \"1\" assert config.get_string(\"A.field2\") == \"12\"", "assert config['with-escaped-newline-escape-sequence'] == '\\n\\\\\\\\n\\n' def test_multiline_with_backslash(self): config = ConfigFactory.parse_string( \"\"\"", "4 }} \"\"\".format(tmp_file=fdin.name) ) assert config3['a'] == expected_res def test_include_substitution(self):", "\"str \" } } d = ${a.b.c} f = ${a.b.e}", "3] u = 192.168.1.3/32 g = null } \"\"\" )", "config.get_bool(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException): config.get_string(u'www.example-ö.com.us.name.missing')", "dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3} finally: shutil.rmtree(temp_dir,", "\"\"\" ) assert config['a'] == [ [1, 2], [3, 4]", "= ConfigFactory.parse_string( \"\"\" b: 14 \"\"\") config = unresolved.with_fallback(source) assert", "\"\"\" ) # b is not set so show raise", 
"{tmpDir=/tmp} \"\"\" ) assert config4.get('data-center-east.cluster-size') == 6 assert config4.get('data-center-east.name') ==", "pyhocon import (ConfigFactory, ConfigParser, ConfigSubstitutionException, ConfigTree) from pyhocon.exceptions import (ConfigException,", "{ a : ${foo.d}, b : 1 } bar.b =", "'z': 0, 'y': -1, 'd': 4} def test_self_ref_child(self): config =", "[ ${b} {a: 1, b: 2}, {a: 3, c: 4}", "\"\"\" ) assert config3.get('a.b.c') == 'str' assert config3.get('d') == 'test", "config = ConfigFactory.parse_string( \"\"\" bar : { foo : 42,", "{'foo': 43, 'baz': 43} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self):", "''] assert config.get_bool('t.d') is True assert config.get_int('t.e.y.f') == 7 assert", "'misc': 'mist', 'default-jvm-opts': ['-XX:+UseParNewGC'], 'large-jvm-opts': ['-XX:+UseParNewGC', '-Xm16g'] } def test_fallback_self_ref_substitutions_append(self):", "5 def test_dos_chars_with_float_noeol(self): config = ConfigFactory.parse_string(\"foo = 5.0\") assert config['foo']", "\"\"\" ) config2 = ConfigFactory.parse_string( \"\"\" list += [ 4,", "[ include \"dummy.txt\" 3 4 ] \"\"\" ) assert config1['a']", "config.get(\"bar\") == {'a': 4, 'b': 3} assert config.get(\"foo\") == {'c':", "== '<PASSWORD>' def test_substitution_flat_override(self): config = ConfigFactory.parse_string( \"\"\" database {", "\"\"\" e : ${a} { } \"\"\", resolve=False ) merged", "')) assert config == { 'no_trailing_ws': \"foo bar \", 'trailing_ws':", "{ foo : 43 } \"\"\" ) assert config.get(\"bar\") ==", "= ${?a} g = ${?c1} ${?c2} h = ${?c1} ${?c2}", "'1 bar' def test_substitution_nested_override(self): config = ConfigFactory.parse_string( \"\"\" database {", "pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure', append=True) with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö') with pytest.raises(ConfigException): config.get_bool(u'www.example-ö.com.us.name')", "${x.y} \"\"\" ) assert config.get(\"x.y\") == 1 assert set(config.get(\"x\").keys()) ==", "3} x = {z: 0} ${x} x = {y: -1}", "= [\"-XX:+UseParNewGC\"] application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g,", "\"\"\" a: 1 b: { pb: 5 } \"\"\") assert", "ConfigFactory.parse_string( \"\"\" b: 14 \"\"\") config = unresolved.with_fallback(source) assert config['foo']", "merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"b1\") == {\"v1\": 2,", "[3, 4] assert config.get(\"x.y\") == [5, 6] assert config.get(\"x.z\") ==", "expected == config_tree def test_merge_overriden(self): # Adress issue #110 #", "config = ConfigFactory.parse_string( \"\"\" x = {a: 1, b: 2}", "domain' with pytest.raises(ConfigWrongTypeException): config.put(u'www.example-ö', 'append_failure', append=True) with pytest.raises(ConfigMissingException): config.get_string(u'missing_unicode_key_ö') with", "'python', 'java'] config2 = ConfigFactory.parse_string( \"\"\" common_modules = [php, python]", "config == { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self):", "config == { 'int_from_env': '5' } assert config.get_int('int_from_env') == 5", "bars} types = {bar['type'] for bar in bars if 'type'", "'say': 'meow' }, 't': 2 } } assert expected ==", "= {a: 1, b: 2} x = ${x} {c: 3}", "comment 9 ] } # comment 10 // comment 11", "] \"\"\") assert 
config.get(\"b1\")[1]['VAR'] == 'right' assert config.get(\"b2\")[1]['VAR'] == 'right'", "config.get('b') == 'abc' assert config.get('c') == 'the man' assert config.get('d')", "with-escaped-newline-escape-sequence: \\\"\\\"\\\" \\\\\\\\n \\\"\\\"\\\" \"\"\" ) assert config['with-escaped-backslash'] == '\\n\\\\\\\\\\n'", "fdin: fdin.write(\"{c: 3}\") # add the temp dir to sys.path", "== { 'include-database': True } def test_substitution_override(self): config = ConfigFactory.parse_string(", "\"\"\" database { name = peopledb pass = <PASSWORD> name", "= \"wrong\" compilerCommon : ${common} { VAR : ${var} }", "python] host_modules = ${common_modules} [java] \"\"\" ) assert config.get('host_modules') ==", "''' config = ConfigFactory.parse_string( \"\"\" // bar.a should end up", "\"\\n\", \"no-newline\": \"\\\\n\", \"cr\": \"\\r\", \"no-cr\": \"\\\\r\", \"windows\": \"c:\\\\temp\" }", "assert config.get_list('base.bar') == [\"a\"] assert config.get_list('sub.baz') == [\"a\", \"b\"] assert", "a = foo \"bar\" ${b} dummy c = foo ${x}", "{ b: { c = str e = \"str \"", "test_concat_multi_line_dict(self): config = ConfigFactory.parse_string( \"\"\" common_modules = {a:perl} \\ {b:java}", "def test_self_ref_substitution_dict_recurse_part(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} {y:", "assert config.get('a.b.d') == 8 def test_concat_dict(self): config = ConfigFactory.parse_string( \"\"\"", "} \"\"\" config = ConfigFactory.parse_string(input_string) assert config.get_string(u'www.sample.com.us.name') == 'first domain'", "'abc', 'c': 5 } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment(self): config =", "} }, \"h\": { \"i\": { \"m\": 7, \"d\": 5,", "== 1 and config['b'] == 1 def test_object_field_substitution(self): config =", "one == {'x': [3, 4]} assert two == 2 assert", "8] ] \"\"\" ) assert config['a'] == [ [5, 6,", "{ c = test // comment 0 g = 6", "assert set(config.get(\"x\").keys()) == set(['y']) def test_self_ref_substitution_dict_recurse(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\"", "6 } data-center-east = ${data-center-generic} data-center-east = { name =", "} def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" a = ${b}", "test_from_dict_with_nested_dict(self): d = OrderedDict() d['banana'] = 3 d['apple'] = 4", "assert config['c'] == 'c' def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( \"\"\"", "mutation on config2 assert \"abc\" not in str(config2) def test_fallback_non_root(self):", "Exception: from datetime import timedelta as period class TestConfigParser(object): def", ": { bar: [\"a\"] } sub : ${base} { baz:", "neg_long2 = 121.22e-3 \"\"\" ) # on python 3 long", "= source.with_fallback(unresolved) assert config['foo'] == 42 def test_fallback_with_resolve(self): config3 =", "= ConfigFactory.parse_string( \"\"\" a = 123 a = ${?test} a", "are ConfigTree config_tree = ConfigFactory.parse_string(\"\"\" foo: ${bar} foo: ${baz} bar:", "} \"\"\" ) assert config.get_string(\"A.field1\") == \"1\" assert config.get_string(\"A.field2\") ==", "6, 1, 2], [3, 4, 5, 6], [1, 2, 5,", "bar \", 'trailing_ws_with_comment': \"foo bar \" } def test_unquoted_strings_with_ws(self): config", "a: { b: { c = 5 } } a.b", "\"\"\" ) assert config == { 'test_no_quotes': 'abc\\n\\n', 'test_quotes': 'abc\\n\\n'", "None assert config.get_string('t.g') is None assert 
# -*- encoding: utf-8 -*-
import json
import os
import shutil
import tempfile
from collections import OrderedDict
from datetime import timedelta

import mock
import pytest
from pyparsing import ParseBaseException, ParseException, ParseSyntaxException

from pyhocon import (ConfigFactory, ConfigParser,
                     ConfigSubstitutionException, ConfigTree)
from pyhocon.exceptions import (ConfigException, ConfigMissingException,
                                ConfigWrongTypeException)

try:
    from dateutil.relativedelta import relativedelta as period
except Exception:
    period = timedelta


class TestConfigParser(object):

    def test_parse_simple_value(self):
        config = ConfigFactory.parse_string(
            """t = {
                c = 5
                e.y = {
                    f: 7
                    g: "hey dude!"
                    h: hey man
                    i = \"\"\"
                        "first line"
                        "second" line
                        \"\"\"
                }
                j = [1, 2, 3]
                u = 192.168.1.3/32
                g = null
            }
            """
        )
        assert config.get_string('t.c') == '5'
        assert config.get_int('t.c') == 5
        assert config.get_float('t.c') == 5.0
        assert config.get('t.e.y.f') == 7
        assert config.get('t.e.y.g') == 'hey dude!'
        assert config.get('t.e.y.h') == 'hey man'
        assert [v.strip() for v in config.get('t.e.y.i').split('\n')] == ['', '"first line"', '"second" line', '']
        assert config.get('t.j') == [1, 2, 3]
        assert config.get('t.u') == '192.168.1.3/32'
        assert config.get_int('t.g') is None
        assert config.get_float('t.g') is None
        assert config.get_string('t.g') is None
        assert config.get_bool('t.g') is None
        assert config.get_list('t.g') is None
        assert config.get_config('t.g') is None

    @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&'])
    def test_parse_forbidden_characters(self, forbidden_char):
        with pytest.raises(ParseBaseException):
            ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char))

    @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!', '@', '*', '&'])
    def test_parse_forbidden_characters_quoted(self, forbidden_char):
        value = 'hey man{}'.format(forbidden_char)
        config = ConfigFactory.parse_string('a: "{}"'.format(value))
        assert config.get_string("a") == value

    @pytest.mark.parametrize('forbidden_char', ['$', '"'])
    def test_fail_parse_forbidden_characters_in_context(self, forbidden_char):
        with pytest.raises(ParseException):
            ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char))

    def test_string_substitutions(self):
        config1 = ConfigFactory.parse_string(
            """
            {
                a: {
                    b: {
                        c = str
                        e = "str "
                    }
                }
                d = ${a.b.c}
                f = ${a.b.e}
            }
            """
        )
        assert config1.get('a.b.c') == 'str'
        assert config1.get('d') == 'str'
        assert config1.get('f') == 'str '

        config3 = ConfigFactory.parse_string(
            """
            {
                a: {
                    b: {
                        c = str
                        e = "str "
                    }
                }
                d = test ${a.b.c} me
                f = test ${a.b.e} me
            }
            """
        )
        assert config3.get('a.b.c') == 'str'
        assert config3.get('d') == 'test str me'

    @pytest.mark.parametrize('data_set', [
        ('a: 1 minutes', period(minutes=1)),
        ('a: 1minutes', period(minutes=1)),
        ('a: 2 minute', period(minutes=2)),
        ('a: 3 m', period(minutes=3)),
        ('a: 3m', period(minutes=3)),
        ('a: 3 min', '3 min'),
        ('a: 4 seconds', period(seconds=4)),
        ('a: 7 hours', period(hours=7)),
        ('a: 8 hour', period(hours=8)),
        ('a: 9 h', period(hours=9)),
        ('a: 10 weeks', period(weeks=10)),
        ('a: 11 week', period(weeks=11)),
        ('a: 11 days', period(days=11)),
        ('a: 12 d', period(days=12)),
        ('a: 110 microseconds', period(microseconds=110)),
        ('a: 111 microsecond', period(microseconds=111)),
        ('a: 112 micros', period(microseconds=112)),
        ('a: 110 milliseconds', timedelta(milliseconds=110)),
        ('a: 111 millisecond', timedelta(milliseconds=111)),
        ('a: 112 millis', timedelta(milliseconds=112)),
        ('a: 113 milli', timedelta(milliseconds=113)),
        ('a: 114 ms', timedelta(milliseconds=114)),
        ('a: 110 nanoseconds', period(microseconds=0)),
        ('a: 11000 nanoseconds', period(microseconds=11)),
        ('a: 1110000 nanosecond', period(microseconds=1110)),
        ('a: 1140000 ns', period(microseconds=1140)),
    ])
    def test_parse_string_with_duration(self, data_set):
        config = ConfigFactory.parse_string(data_set[0])
        assert config['a'] == data_set[1]
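    # A minimal usage sketch tied to the duration cases above (the key name
    # 'refresh_interval' is illustrative, not from the original suite): bare
    # duration strings come back as the same time type the tests compare
    # against, so callers can do arithmetic on the value directly.
    def test_duration_usage_sketch(self):
        config = ConfigFactory.parse_string('refresh_interval: 10 weeks')
        assert config['refresh_interval'] == period(weeks=10)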
    def test_fallback_with_resolve(self):
        config3 = ConfigFactory.parse_string("c=5")
        config2 = ConfigFactory.parse_string("b=${c}", resolve=False)
        config1 = ConfigFactory.parse_string("a=${b}", resolve=False) \
            .with_fallback(config2, resolve=False) \
            .with_fallback(config3)
        assert {'a': 5, 'b': 5, 'c': 5} == config1

    def test_fallback_non_root(self):
        root = ConfigFactory.parse_string(
            """
            a = 1
            mid.b = 1
            """
        )
        config = root.get_config("mid").with_fallback(root)
        assert config['a'] == 1 and config['b'] == 1

    def test_fallback_self_ref_substitutions_append(self):
        config1 = ConfigFactory.parse_string(
            """
            list = [ 1, 2, 3 ]
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            list = ${list} [ 4, 5, 6 ]
            """,
            resolve=False
        )
        config2 = config2.with_fallback(config1)
        assert config2.get("list") == [1, 2, 3, 4, 5, 6]

    def test_fallback_self_ref_substitutions_merge(self):
        config1 = ConfigFactory.parse_string(
            """
            dict = { x: 1 }
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            dict = ${dict} { y: 2 }
            """,
            resolve=False
        )
        config2 = config2.with_fallback(config1)
        assert config2.get("dict") == {'x': 1, 'y': 2}

    def test_fallback_self_ref_substitutions_concat_string(self):
        config1 = ConfigFactory.parse_string(
            """
            string = abc
            """
        )
        config2 = ConfigFactory.parse_string(
            """
            string = ${string}def
            """,
            resolve=False
        )
        result = config2.with_fallback(config1)
        assert result.get("string") == 'abcdef'
        # test no mutation on config1
        assert result is not config1
        # test no mutation on config2
        assert "abc" not in str(config2)

    def test_substitution_list_with_append(self):
        config = ConfigFactory.parse_string(
            """
            application.foo = 128mm
            application.default-jvm-opts = ["-XX:+UseParNewGC"]
            application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}]
            application.large-jvm-opts2 = [-Xm16g, ${application.foo}] ${application.default-jvm-opts}
            """)

        assert config["application.large-jvm-opts"] == [
            '-XX:+UseParNewGC',
            '-Xm16g',
            '128mm'
        ]

        assert config["application.large-jvm-opts2"] == [
            '-Xm16g',
            '128mm',
            '-XX:+UseParNewGC'
        ]

    def test_include_file(self):
        with tempfile.NamedTemporaryFile('w') as fdin:
            fdin.write('{a: 1, b: 2}')
            fdin.flush()
            config1 = ConfigFactory.parse_string(
                """
                a: [
                    include "{tmp_file}"
                ]
                """.format(tmp_file=fdin.name)
            )
            assert config1['a'] == [1, 2]

            config2 = ConfigFactory.parse_string(
                """
                a: [
                    include file("{tmp_file}")
                ]
                """.format(tmp_file=fdin.name)
            )
            assert config2['a'] == [1, 2]

            config3 = ConfigFactory.parse_string(
                """
                a: [
                    include url("file://{tmp_file}")
                ]
                """.format(tmp_file=fdin.name)
            )
            assert config3['a'] == [1, 2]

    def test_include_missing_file(self):
        config1 = ConfigFactory.parse_string(
            """
            a: [
                include "dummy.txt"
                3
                4
            ]
            """
        )
        assert config1['a'] == [3, 4]

    def test_include_required_file(self):
        config = ConfigFactory.parse_string(
            """
            a {
                include required("samples/animals.d/cat.conf")
                t = 2
            }
            """
        )
        expected = {
            'a': {
                'garfield': {
                    'say': 'meow'
                },
                't': 2
            }
        }
        assert expected == config

    def test_include_missing_required_file(self):
        with pytest.raises(IOError):
            ConfigFactory.parse_string(
                """
                a: [
                    include required("dummy.txt")
                    3
                    4
                ]
                """
            )

    def test_include_substitution(self):
        with tempfile.NamedTemporaryFile('w') as fdin:
            fdin.write('y = ${x}')
            fdin.flush()
            config = ConfigFactory.parse_string(
                """
                include "{tmp_file}"
                x = 42
                """.format(tmp_file=fdin.name)
            )
            assert config['x'] == 42
            assert config['y'] == 42

    def test_include_package_file(self, monkeypatch):
        temp_dir = tempfile.mkdtemp()
        try:
            module_dir = os.path.join(temp_dir, 'my_module')
            module_conf = os.path.join(module_dir, 'my.conf')
            # create the module folder and necessary files (__init__ and config)
            os.mkdir(module_dir)
            open(os.path.join(module_dir, '__init__.py'), 'a').close()
            with open(module_conf, 'w') as fdin:
                fdin.write("{c: 3}")
            # add the temp dir to the python path so that 'my_module' can be discovered
            monkeypatch.syspath_prepend(temp_dir)
            # load the config and include the config file from 'my_module'
            config = ConfigFactory.parse_string(
                """
                a: 1
                b: 2
                include package("my_module:my.conf")
                """
            )
            # check that the contents of both files are available
            assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3}
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
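    # A short sketch of the layering pattern the fallback tests above
    # exercise (the key name 'opts' is illustrative): overrides are parsed
    # with resolve=False so that their self-references resolve against the
    # defaults only once with_fallback() has merged the two trees.
    def test_with_fallback_layering_sketch(self):
        defaults = ConfigFactory.parse_string('opts = [1, 2]')
        overrides = ConfigFactory.parse_string('opts = ${opts} [3]', resolve=False)
        merged = overrides.with_fallback(defaults)
        assert merged['opts'] == [1, 2, 3]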
    def test_comma_to_separate_expr(self):
        config = ConfigFactory.parse_string(
            """
            a=1,
            b="abc",
            c=the man,
            d=woof,
            a-b-c-d=test,
            a b c d=test2,
            "a b c d e"=test3
            """
        )
        assert config.get('a') == 1
        assert config.get('b') == 'abc'
        assert config.get('c') == 'the man'
        assert config.get('d') == 'woof'
        assert config.get('a-b-c-d') == 'test'
        assert config.get('a b c d') == 'test2'
        assert config.get('a b c d e') == 'test3'

    def test_self_ref_substitution_array(self):
        config = ConfigFactory.parse_string(
            """
            x = [1,2]
            x = ${x} [3,4]
            x = [-1, 0] ${x} [5, 6]
            x = [-3, -2] ${x}
            """
        )
        assert config.get("x") == [-3, -2, -1, 0, 1, 2, 3, 4, 5, 6]

    def test_self_ref_substitution_object(self):
        config = ConfigFactory.parse_string(
            """
            x = {a: 1, b: 2}
            x = ${x} {c: 3}
            x = {z: 0} ${x}
            x = {y: -1} ${x} {d: 4}
            """
        )
        assert config.get("x") == {'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4}

    def test_self_ref_substitution_array_to_dict(self):
        config = ConfigFactory.parse_string(
            """
            x = [1,2]
            x = {x: [3,4]}
            x = {y: [5,6]}
            x = {z: ${x}}
            """
        )
        assert config.get("x.x") == [3, 4]
        assert config.get("x.y") == [5, 6]

    def test_self_ref_substitution_dict_in_array(self):
        config = ConfigFactory.parse_string(
            """
            x = {x: [3,4]}
            x = [${x}, 2, 3]
            """
        )
        (one, two, three) = config.get("x")
        assert one == {'x': [3, 4]}
        assert two == 2
        assert three == 3

    def test_self_ref_substitution_dict_recurse(self):
        with pytest.raises(ConfigSubstitutionException):
            ConfigFactory.parse_string(
                """
                x = ${x}
                """
            )

    def test_self_ref_substitution_dict_merge(self):
        ''' Example from HOCON spec '''
        config = ConfigFactory.parse_string(
            """
            foo : { a : { c : 1 } }
            foo : ${foo.a}
            foo : { a : 2 }
            """
        )
        assert config.get('foo') == {'a': 2, 'c': 1}
        assert set(config.keys()) == set(['foo'])

    def test_self_ref_substitution_dict_otherfield(self):
        ''' Example from HOCON spec '''
        config = ConfigFactory.parse_string(
            """
            bar : {
                foo : 42,
                baz : ${bar.foo}
            }
            """
        )
        assert config.get("bar") == {'foo': 42, 'baz': 42}
        assert set(config.keys()) == set(['bar'])

    def test_self_ref_substitution_dict_otherfield_merged_in(self):
        ''' Example from HOCON spec '''
        config = ConfigFactory.parse_string(
            """
            bar : {
                foo : 42,
                baz : ${bar.foo}
            }
            bar : { foo : 43 }
            """
        )
        assert config.get("bar") == {'foo': 43, 'baz': 43}
        assert set(config.keys()) == set(['bar'])

    def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self):
        ''' Example from HOCON spec '''
        config = ConfigFactory.parse_string(
            """
            // bar.a should end up as 4
            bar : { a : ${foo.d}, b : 1 }
            bar.b = 3
            // foo.c should end up as 3
            foo : { c : ${bar.b}, d : 2 }
            foo.d = 4
            """
        )
        assert config.get("bar") == {'a': 4, 'b': 3}
        assert config.get("foo") == {'c': 3, 'd': 4}
        assert set(config.keys()) == set(['bar', 'foo'])

    def test_self_ref_substitution_string_opt_concat(self):
        ''' Example from HOCON spec '''
        config = ConfigFactory.parse_string(
            """
            a = ${?a}foo
            """
        )
        assert config.get("a") == 'foo'
        assert set(config.keys()) == set(['a'])
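    # Sketch of the rule the self-reference tests above rely on (the key
    # name 'path' and the directory values are illustrative): ${x} inside a
    # new definition of x refers to the *previous* value of x, which is what
    # makes append-style definitions like this one work.
    def test_self_ref_append_sketch(self):
        config = ConfigFactory.parse_string(
            """
            path = [/usr/bin]
            path = ${path} [/usr/local/bin]
            """
        )
        assert config['path'] == ['/usr/bin', '/usr/local/bin']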
    def test_assign_strings_with_eol(self):
        config = ConfigFactory.parse_string(
            """
            a = "a"
            b = # test
                # test2
                "b"
            c = "c"
            """
        )
        assert config['a'] == 'a'
        assert config['b'] == 'b'
        assert config['c'] == 'c'

    def test_assign_list_numbers_with_eol(self):
        config = ConfigFactory.parse_string(
            """
            a = [
                1,
                2,
            ]
            b = # test
                # test2
                [
                3,
                4,]
            c = [
                5,
                6
            ]
            """
        )
        assert config['a'] == [1, 2]
        assert config['b'] == [3, 4]
        assert config['c'] == [5, 6]

    def test_assign_list_strings_with_eol(self):
        config = ConfigFactory.parse_string(
            """
            a = [
                "a",
                "b",
            ]
            b = # test
                # test2
                [
                "c",
                "d",]
            c = [
                "e",
                "f"
            ]
            """
        )
        assert config['a'] == ['a', 'b']
        assert config['b'] == ['c', 'd']
        assert config['c'] == ['e', 'f']

    def test_assign_dict_strings_with_equal_sign_with_eol(self):
        config = ConfigFactory.parse_string(
            """
            a = {
                a: 1,
                b: 2,
            }
            b = # test
                # test2
                {
                c: 3,
                d: 4,}
            c = {
                e: 5,
                f: 6
            }
            """
        )
        assert config['a'] == {'a': 1, 'b': 2}
        assert config['b'] == {'c': 3, 'd': 4}
        assert config['c'] == {'e': 5, 'f': 6}

    def test_object_concat(self):
        config = ConfigFactory.parse_string(
            """
            o1 : {
                foo : {
                    a : 1
                    b : 2
                }
            }
            o2 : {
                foo : {
                    b : 3
                    c : 4
                }
            }
            o3 = ${o1} ${o2}
            """
        )
        assert config.get_int('o1.foo.b') == 2
        assert config.get_int('o2.foo.b') == 3
        assert config.get_int('o3.foo.b') == 3
        assert config.get_int('o3.foo.a') == 1
        assert config.get_int('o3.foo.c') == 4

    def test_issue_75(self):
        config = ConfigFactory.parse_string(
            """
            base : {
                bar: ["a"]
            }

            sub : ${base} {
                baz: ${base.bar} ["b"]
            }

            sub2: ${sub}
            """
        )
        assert config.get_list('base.bar') == ["a"]
        assert config.get_list('sub.baz') == ["a", "b"]
        assert config.get_list('sub2.baz') == ["a", "b"]
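    # Sketch of the merge rule test_object_concat checks (the names
    # 'defaults' and 'job' are illustrative): when two objects are
    # concatenated, the right-hand object wins on conflicting keys and all
    # remaining keys merge.
    def test_object_concat_sketch(self):
        config = ConfigFactory.parse_string(
            """
            defaults = { retries: 3, verbose: false }
            job = ${defaults} { verbose: true }
            """
        )
        assert config['job'] == {'retries': 3, 'verbose': True}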
    def test_concat_string(self):
        config = ConfigFactory.parse_string(
            """
            a = a b c
            b = 5 b
            c = b 7
            """
        )
        assert config.get('a') == 'a b c'
        assert config.get('b') == '5 b'
        assert config.get('c') == 'b 7'

    def test_concat_list(self):
        config = ConfigFactory.parse_string(
            """
            a = [1, 2] [3, 4] [
              5,
              6
            ]
            """
        )
        assert config['a'] == [1, 2, 3, 4, 5, 6]
        assert config.get_list('a') == [1, 2, 3, 4, 5, 6]

    def test_concat_multi_line_string(self):
        config = ConfigFactory.parse_string(
            """
            common_modules = perl \
              java \
              python
            """
        )
        assert [x.strip() for x in config['common_modules'].split() if x.strip(' ') != ''] == ['perl', 'java', 'python']

    def test_concat_multi_line_list(self):
        config = ConfigFactory.parse_string(
            """
            common_modules = [perl] \
              [java] \
              [python]
            """
        )
        assert config['common_modules'] == ['perl', 'java', 'python']

    def test_concat_multi_line_dict(self):
        config = ConfigFactory.parse_string(
            """
            common_modules = {a:perl} \
              {b:java} \
              {c:python}
            """
        )
        assert config['common_modules'] == {'a': 'perl', 'b': 'java', 'c': 'python'}

    def test_unquoted_strings_with_ws(self):
        config = ConfigFactory.parse_string(
            """
            a = foo bar
            """)
        assert config == {
            'a': 'foo bar'
        }

    def test_quoted_unquoted_strings_with_ws(self):
        config = ConfigFactory.parse_string(
            """
            a = foo "bar" dummy
            """)
        assert config == {
            'a': 'foo bar dummy'
        }

    def test_quoted_unquoted_strings_with_ws_substitutions(self):
        config = ConfigFactory.parse_string(
            """
            x = 5
            b = test
            a = foo "bar" ${b} dummy
            c = foo ${x} bv
            d = foo ${x} 43
            """)
        assert config == {
            'x': 5,
            'b': 'test',
            'a': 'foo bar test dummy',
            'c': 'foo 5 bv',
            'd': 'foo 5 43'
        }
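    # Sketch of the concatenation rule the tests above cover (key names and
    # values are illustrative): quoted strings, unquoted tokens and ${...}
    # references on one line join into a single space-separated string.
    def test_string_concat_sketch(self):
        config = ConfigFactory.parse_string(
            """
            user = admin
            greeting = hello ${user} and welcome
            """
        )
        assert config['greeting'] == 'hello admin and welcome'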
    def test_dict_substitutions(self):
        config = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = ${data-center-generic} {name = "east"}
            """
        )
        assert config.get('data-center-east.cluster-size') == 6
        assert config.get('data-center-east.name') == 'east'

        config2 = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = {name = "east"} ${data-center-generic}
            """
        )
        assert config2.get('data-center-east.cluster-size') == 6
        assert config2.get('data-center-east.name') == 'east'

        config3 = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = {name = "east"} ${data-center-generic} { cluster-size = 9, opts = "-Xmx4g" }
            """
        )
        assert config3.get('data-center-east.cluster-size') == 9
        assert config3.get('data-center-east.name') == 'east'
        assert config3.get('data-center-east.opts') == '-Xmx4g'

        config4 = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = {name = "east"} ${data-center-generic}
            data-center-east-prod = ${data-center-east} {tmpDir=/tmp}
            """
        )
        assert config4.get('data-center-east.cluster-size') == 6
        assert config4.get('data-center-east.name') == 'east'
        assert config4.get('data-center-east-prod.cluster-size') == 6
        assert config4.get('data-center-east-prod.tmpDir') == '/tmp'

        config5 = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = ${data-center-generic}
            data-center-east = { name = "east" }
            """
        )
        assert config5['data-center-east'] == {
            'name': 'east',
            'cluster-size': 6
        }

        config6 = ConfigFactory.parse_string(
            """
            data-center-generic = { cluster-size = 6 }
            data-center-east = { name = "east" }
            data-center-east = ${data-center-generic}
            """
        )
        assert config6['data-center-east'] == {
            'name': 'east',
            'cluster-size': 6
        }

    def test_substitution_flat_override(self):
        config = ConfigFactory.parse_string(
            """
            database {
                name = peopledb
                pass = <PASSWORD>
            }
            database {
                name = ${?user}
                pass = ${?pass}
            }
            """)
        assert config['database.name'] == 'peopledb'
        assert config['database.pass'] == '<PASSWORD>'

    def test_substitution_multiple_override(self):
        config = ConfigFactory.parse_string(
            """
            a: 1
            b: foo
            c: ${a} ${b}
            c: ${b} ${a}
            d: ${a} ${b}
            d: ${a} bar
            """)
        assert config['c'] == 'foo 1'
        assert config['d'] == '1 bar'

    def test_cascade_optional_substitution(self):
        config = ConfigFactory.parse_string(
            """
            num = 3
            retries_msg = You have ${num} retries
            retries_msg = ${?CUSTOM_MSG}
            """)
        assert config == {
            'num': 3,
            'retries_msg': 'You have 3 retries'
        }

    def test_substitution_cycle(self):
        with pytest.raises(ConfigSubstitutionException):
            ConfigFactory.parse_string(
                """
                a = ${b}
                b = ${c}
                c = ${a}
                """)
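    # Sketch of the override chain used just above (the key and variable
    # names are illustrative): the last definition of a key wins, except
    # that an optional ${?ref} definition is skipped when the reference is
    # unset, exactly as in test_cascade_optional_substitution.
    def test_override_chain_sketch(self):
        config = ConfigFactory.parse_string(
            """
            mode = dev
            mode = prod
            mode = ${?UNSET_MODE}
            """
        )
        assert config['mode'] == 'prod'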
    def test_with_comment_on_last_line(self):
        # addresses issue #102
        config_tree = ConfigFactory.parse_string("""
        foo: "1"
        bar: "2"
        # DO NOT""")
        assert config_tree == {
            'foo': '1',
            'bar': '2'
        }

    def test_optional_substitution(self):
        config = ConfigFactory.parse_string(
            """
            a = 45
            b = ${?c}
            d = ${?c} 4
            e = ${?a}
            g = ${?c1} ${?c2}
            h = ${?c1} ${?c2} 1
            """)
        assert 'b' not in config
        assert config['d'] == 4
        assert config['e'] == 45
        assert 'g' not in config
        assert config['h'] == 1

    def test_assign_number_with_eol(self):
        config = ConfigFactory.parse_string(
            """
            a =
            4
            b = # test
                # test2
                5
            c =
            6
            """
        )
        assert config['a'] == 4
        assert config['b'] == 5
        assert config['c'] == 6

    @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')
    def test_string_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            string_from_env = ${STRING_VAR}
            """)
        assert config == {
            'string_from_env': 'value_from_environment'
        }

    @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')
    def test_string_from_environment_self_ref(self):
        config = ConfigFactory.parse_string(
            """
            STRING_VAR = ${STRING_VAR}
            """)
        assert config == {
            'STRING_VAR': 'value_from_environment'
        }

    @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')
    def test_string_from_environment_self_ref_optional(self):
        config = ConfigFactory.parse_string(
            """
            STRING_VAR = ${?STRING_VAR}
            """)
        assert config == {
            'STRING_VAR': 'value_from_environment'
        }

    @mock.patch.dict(os.environ, TRUE_OR_FALSE='false')
    def test_bool_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            bool_from_env = ${TRUE_OR_FALSE}
            """)
        assert config == {
            'bool_from_env': 'false'
        }
        assert config.get_bool('bool_from_env') is False

    @mock.patch.dict(os.environ, INT_VAR='5')
    def test_int_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            int_from_env = ${INT_VAR}
            """)
        assert config == {
            'int_from_env': '5'
        }
        assert config.get_int('int_from_env') == 5
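    # A small usage sketch built on the environment tests above (the
    # variable name APP_PORT is illustrative, not from the original suite):
    # an optional ${?VAR} reference lets the process environment supply a
    # value without any config file edit.
    @mock.patch.dict(os.environ, APP_PORT='8080')
    def test_environment_override_sketch(self):
        config = ConfigFactory.parse_string('port = ${?APP_PORT}')
        assert config.get_int('port') == 8080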
ConfigFactory.parse_string( \"\"\" list", ") assert config['a'] == [ [5, 6, 1, 2], [3,", "a = ${b} b = ${c} c = ${a} \"\"\")", "config2 = config2.with_fallback(config1) assert config2.get(\"list\") == [1, 2, 3, 4,", "2, 'f': 4}, {'a': 3, 'c': 4, 'f': 4}, {'a':", "${x} \"\"\" ) def test_self_ref_substitution_dict_merge(self): ''' Example from HOCON spec", "module_conf = os.path.join(module_dir, 'my.conf') # create the module folder and", "} \"\"\" ) assert config2.get('a.b.c') == 5 assert config2.get('d') ==", "42 foo: ${?a} \"\"\", resolve=False) source = ConfigFactory.parse_string( \"\"\" b:", "b is not set so show raise an exception with", "files are available assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2,", "assert config == { 'bool_from_env': 'false' } assert config.get_bool('bool_from_env') is", "t = 2 } \"\"\" ) expected = { 'a':", "config_tree.pop('a.c', 5) expected = { 'a': {'d': 6} } assert", "a: 1 b: 2 include package(\"my_module:my.conf\") \"\"\" ) # check", "config = ConfigFactory.parse_string( \"\"\" common_modules = {a:perl} \\ {b:java} \\", "x = {x: [3,4]} x = {y: [5,6]} x =", "name = ${?user} pass = ${?pass} } \"\"\") assert config['database.name']", "\"\"\" short = 12 long = 12321321837612378126213217321 negative = -15", "assert two == 2 assert three == 3 def test_self_ref_substitution_dict_path(self):", "config['a']['y'] == 42 def test_var_with_include_keyword(self): config = ConfigFactory.parse_string( \"\"\" include-database=true", "= ConfigFactory.parse_string( \"\"\" common : { } b1 = []", "a: 1 b: { pb: 5 } \"\"\") assert 5", "== [ [5, 6, 1, 2], [3, 4, 5, 6],", "\"\"\" application.foo = 128mm application.default-jvm-opts = [\"-XX:+UseParNewGC\"] application.large-jvm-opts = ${application.default-jvm-opts}", "'perl'] config4 = ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules", "config['\"a.b.c.d\"'] == 3 assert config['t.d.c'] == 5 assert config['k.\"b.f.d\"'] ==", "config.get_list('a') == [1, 2, 3, 4, 5, 6] def test_bad_concat(self):", "10 names = {bar['name'] for bar in bars} types =", "3 assert config['t.d.c'] == 5 assert config['k.\"b.f.d\"'] == 7 def", "config['c'] == 'c' def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a", "== {\"v1\": 2, \"v2\": 3} def test_fallback_self_ref_substitutions_merge(self): config1 = ConfigFactory.parse_string(", "} sub2: ${sub} \"\"\" ) assert config.get_list('base.bar') == [\"a\"] assert", "== { 'a': 'foo bar' } def test_quoted_unquoted_strings_with_ws(self): config =", "} h.i.m = 7 h.i { d: 5 } h.i", "with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = [4] \"4\"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a =", "assert expected == config config2 = ConfigFactory.parse_string( \"\"\" a {", "in config_tree['foo'] and 't' in config_tree['foo'] and config_tree['foo']['s'] == 3", "1 } b = [${b1}] \"\"\", resolve=False ) config2 =", ") assert config3['a'] == expected_res def test_include_substitution(self): with tempfile.NamedTemporaryFile('w') as", ": { a : ${foo.d}, b : 1 } bar.b", "1, 10 weeks, 5 minutes,] c: bar \"\"\" ) assert", "config['foo'] == '5' def test_dos_chars_with_int_noeol(self): config = ConfigFactory.parse_string(\"foo = 5\")", "def test_self_merge_ref_substitutions_object3(self): config1 = ConfigFactory.parse_string( \"\"\" b1 : { v1:", "\"\"\") assert config == { 'a': 'abc', 'c': 5 }", "2] config2 = ConfigFactory.parse_string( \"\"\" a: [ include file(\"{tmp_file}\") ]", 
"STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config = ConfigFactory.parse_string( \"\"\" STRING_VAR = ${?STRING_VAR}", "spec ''' config = ConfigFactory.parse_string( \"\"\" // bar.a should end", "\"a\" b = # test # test2 \"b\" c =", "'b': 2, 'c': 3, 'd': 4 } with tempfile.NamedTemporaryFile('w') as", "'f': 4}, {'a': 3, 'c': 6, 'f': 4} ] def", "} def test_substitution_override(self): config = ConfigFactory.parse_string( \"\"\" database { host", "relativedelta(years=1)), ('a: 1years', relativedelta(years=1)), ('a: 2 year', relativedelta(years=2)), ('a: 3", "for x in config['common_modules'].split() if x.strip(' ') != ''] ==", "\"\"\"base : { bar: [\"a\"] } sub : ${base} {", "= 42 \"\"\".format(tmp_file=fdin.name) ) assert config['x'] == 42 assert config['y']", "x = {y: {y: 1}} x = ${x.y} \"\"\" )", "def test_fallback_substitutions_overwrite_file(self): config1 = ConfigFactory.parse_string( \"\"\" { data-center-generic = {", "'data-center-generic': {'cluster-size': 8}, 'data-center-east': {'cluster-size': 8, 'name': 'east'}, 'misc': 'mist',", "\"\"\" a: foo b: 10 weeks c: bar \"\"\" )", "exception with pytest.raises(ConfigMissingException): config.get('b') def test_parse_null(self): config = ConfigFactory.parse_string( \"\"\"", "resolve=False ) config2 = ConfigFactory.parse_string( \"\"\" b2 : ${x} {v2:", "ConfigFactory.parse_string( \"\"\" a = // abc abc c = 5", "5' } def test_dict_substitutions(self): config = ConfigFactory.parse_string( \"\"\" data-center-generic =", "\"b\"] def test_plain_ordered_dict(self): config = ConfigFactory.parse_string( \"\"\" e : ${a}", "== data_set[1] def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( \"\"\" a: foo", "\"\"\") def test_assign_number_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = 4", "\"\"\".format(tmp_file=fdin.name) ) assert config2['a'] == [1, 2] config3 = ConfigFactory.parse_string(", "'r' in config_tree['foo'] and 't' in config_tree['foo'] and config_tree['foo']['s'] ==", "1 } b1 : {v2: 2 } b = [${b1}]", "2, 3] def test_missing_config(self): config = ConfigFactory.parse_string( \"\"\" a =", "application.default-jvm-opts = [\"-XX:+UseParNewGC\"] application.large-jvm-opts = ${application.default-jvm-opts} [-Xm16g, ${application.foo}] application.large-jvm-opts2 =", "database { name = peopledb pass = <PASSWORD> name =", "def test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings are in the same format", "'cde' == config['/abc/cde2'] assert 'fgh' == config['/abc/cde3'] def test_mutation_values(self): config", "def test_dict_merge(self): config = ConfigFactory.parse_string( \"\"\" a { d {", "assert config.get('animals.mutt.hates.garfield.say') == 'meow' def test_include_glob_list_from_samples(self): config = ConfigFactory.parse_file(\"samples/all_bars.conf\") bars", ": 43 } \"\"\" ) assert config.get(\"bar\") == {'foo': 43,", "== \"123\" def test_one_line_quote_escape(self): config = ConfigFactory.parse_string( \"\"\" test_no_quotes: abc\\\\n\\\\n", "${x} \"\"\" ) def test_self_ref_substitution_dict_recurse2(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x", "= [ -Xms${app.heap_size}m -Xmx${app.heap_size}m ] \"\"\" ) assert config.get('app.java_opts') ==", "= 3 retries_msg = You have ${num} retries retries_msg =", "a.b = 3 a.b = ${a.b} a.b = ${a.b} a.c", "os.path.join(module_dir, 'my.conf') # create the module folder and necessary files", "b: 10 weeks c: bar \"\"\" ) assert 
    def test_parse_string_with_duration_with_long_unit_name(self):
        config = ConfigFactory.parse_string(
            """
            a: foo
            b: 10 weeks
            c: bar
            """
        )
        assert config['b'] == period(weeks=10)

    def test_parse_with_list_mixed_types_with_durations_and_trailing_comma(self):
        config = ConfigFactory.parse_string(
            """
            a: foo
            b: [a, 1, 10 weeks, 5 minutes,]
            c: bar
            """
        )
        assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)]

    def test_parse_with_enclosing_square_bracket(self):
        config = ConfigFactory.parse_string("[1, 2, 3]")
        assert config == [1, 2, 3]
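    # --- Illustrative sketch, not part of the original suite ---
    # Durations parse into timedelta (or relativedelta when dateutil is
    # installed; both are aliased as `period` at the top of this module),
    # so parsed values compare directly against constructed ones.
    def example_duration_usage(self):
        config = ConfigFactory.parse_string('timeout: 5 minutes')
        assert config['timeout'] == period(minutes=5)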
ConfigFactory.parse_string( \"\"\" main_language = php languages = [java, ${main_language}]", "'large-jvm-opts': ['-XX:+UseParNewGC', '-Xm16g'] } def test_fallback_self_ref_substitutions_append(self): config1 = ConfigFactory.parse_string( \"\"\"", "== expected assert config == json.loads(source) try: from dateutil.relativedelta import", "{'v1': 1, 'v2': 3} def test_self_merge_ref_substitutions_object3(self): config1 = ConfigFactory.parse_string( \"\"\"", "list = ${list} [ 4, 5, 6 ] \"\"\", resolve=False", "d = test ${a.b.c} } \"\"\" ) assert config2.get('a.b.c') ==", "test_complex_substitutions(self): config = ConfigFactory.parse_string( \"\"\" a: 1 b: ${c} {", "113 milli', timedelta(milliseconds=113)), ('a: 114 ms', timedelta(milliseconds=114)), ('a: 110 nanoseconds',", "config == d def test_from_dict_with_nested_dict(self): d = OrderedDict() d['banana'] =", "} \"\"\" ) assert config5['data-center-east'] == { 'name': 'east', 'cluster-size':", "cluster-size = 6 } data-center-east = { name = \"east\"", "= [perl] host_modules = ${common_modules} aa \"\"\" ) with pytest.raises(ConfigWrongTypeException):", "'c' def test_assign_list_numbers_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = [", "assert config.get('a.b') == 1 assert config.get('a.c') == 2 assert config.get('b.c')", "5) expected = { 'a': {'d': 6} } assert expected", "config = ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert config.get_string(\"a\") == value def test_parse_with_enclosing_brace(self):", "{'a': 2, 'c': 1} assert set(config.keys()) == set(['foo']) def test_self_ref_substitution_dict_otherfield(self):", "data-center-east = { name = \"east\" } data-center-east = ${data-center-generic}", "ConfigFactory.parse_string('a = \"4\" [5]') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = {b: 5}", "ConfigFactory.parse_string( \"\"\" x = [1,2] x = {x: [3,4]} x", "assert config.get('t.e.y.g') == 'hey dude!' 
assert config.get('t.e.y.h') == 'hey man'", "config2 = ConfigFactory.parse_string( \"\"\" a.b = 4 a.d = 3", "ConfigFactory.parse_string( \"\"\" common_modules = [php, python] host_modules = [java] ${common_modules}", "= ${x.y} \"\"\" ) assert config.get(\"x.y\") == 1 assert set(config.get(\"x\").keys())", "\"\"\" a = {f: 5} common_modules ${a} {perl: 1} \"\"\")", "\"\"\" ) assert config.get('host_modules') == ['php', 'python', 'java'] config2 =", "assert config1['a'] == [3, 4] def test_include_required_file(self): config = ConfigFactory.parse_string(", "\"\"\" ) assert config.get(\"x\") == \" def\" def test_self_append_nonexistent_array(self): config", "x = [1,2] x += [3,4] \"\"\" ) assert config.get(\"x\")", "${?c1} ${?c2} 1 \"\"\") assert 'b' not in config assert", ") assert config['a'] == [1, 2] assert config['b'] == [3,", "test_list_element_substitution(self): config = ConfigFactory.parse_string( \"\"\" main_language = php languages =", "fdin.flush() config = ConfigFactory.parse_string( \"\"\" { a : { include", "6] def test_assign_list_strings_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a = [", "foo \"\"\" ) config5 = ConfigFactory.parse_string( u\"\"\" longName: \"long \"${?name}", "= [java, ${main_language}] \"\"\" ) assert config.get('languages') == ['java', 'php']", "'php', 'python', 'perl'] config4 = ConfigFactory.parse_string( \"\"\" common_modules = [php,", "'test2' assert config.get('a b c d e') == 'test3' def", "c: ${a} ${b} c: ${b} ${a} d: ${a} ${b} d:", "assert 'fgh' == config['/abc/cde3'] def test_mutation_values(self): config = ConfigFactory.parse_string( \"\"\"", "config.get_int('t.c') == 5 assert config.get_float('t.c') == 5.0 assert config.get('t.e.y.f') ==", "-1, 'd': 4} def test_self_ref_child(self): config = ConfigFactory.parse_string( \"\"\" a.b", "the contents of both config files are available assert dict(config.as_plain_ordered_dict())", "config == { 'x': 5, 'b': 'test', 'a': 'foo bar", "${?NOT_EXISTS} pass = ${?NOT_EXISTS} } \"\"\") assert config['database.name'] == 'peopledb'", "2, 3, 4, 5, 6] def test_self_append_array(self): config = ConfigFactory.parse_string(", "c: ${b} ${a} d: ${a} ${b} d: ${a} bar \"\"\")", "add the temp dir to sys.path so that 'my_module' can", "# comment 10 // comment 11 // comment 12 \"\"\"", "{ host = \"other.host.net\" port = 433 } } \"\"\"", "config['a.c'] == \"foo foo\" assert config['a.d'] == \"baz\" def test_comma_to_separate_expr(self):", "assert config.get_int('o1.foo.c', default=42) == 42 assert config.get_int('o3.foo.a') == 1 assert", "5, \"e\": 65 } } } } assert expected_result ==", "[ '-Xm16g', '128mm', '-XX:+UseParNewGC' ] def test_non_existent_substitution(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string(", "null } \"\"\" ) assert config.get_string('t.c') == '5' assert config.get_int('t.c')", "include \"{tmp_file}\" ] \"\"\".format(tmp_file=fdin.name) ) assert config1['a'] == [1, 2]", "1}') with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string( \"\"\" a = {f: 5} common_modules", "c d e\"=test3 \"\"\" ) assert config.get('a') == 1 assert", ") config2 = config2.with_fallback(config1) assert config2.get(\"dict\") == {'x': 1, 'y':", "} b : 1 c : ${a} { d :", "two == 2 assert three == 3 def test_self_ref_substitution_dict_path(self): config", "config4.get('common_modules') == ['php', 'python'] assert config4.get('host_modules') == ['java', 'php', 'python',", "= ConfigFactory.parse_string( \"\"\" a: [ {a: 1, b: 2}, {a:", "3 m', period(minutes=3)), ('a: 3m', 
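    # --- Illustrative sketch, not part of the original suite ---
    # The substitution tests above rely on ${...} references; a common
    # practical use is keeping one value as the single source of truth.
    # The keys base_dir/log_dir are invented for the example.
    def example_substitution_single_source(self):
        config = ConfigFactory.parse_string(
            """
            base_dir = /srv/app
            log_dir = ${base_dir}/logs
            """
        )
        assert config['log_dir'] == '/srv/app/logs'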
    @pytest.mark.parametrize('data_set', [
        ('a: 1 minutes', period(minutes=1)),
        ('a: 1minutes', period(minutes=1)),
        ('a: 2 minute', period(minutes=2)),
        ('a: 3 m', period(minutes=3)),
        ('a: 3m', period(minutes=3)),
        ('a: 3 min', '3 min'),
        ('a: 4 seconds', period(seconds=4)),
        ('a: 5 second', period(seconds=5)),
        ('a: 6 s', period(seconds=6)),
        ('a: 6 sec', '6 sec'),
        ('a: 9 h', period(hours=9)),
        ('a: 10 weeks', period(weeks=10)),
        ('a: 11 week', period(weeks=11)),
        ('a: 110 milliseconds', timedelta(milliseconds=110)),
        ('a: 111 millisecond', timedelta(milliseconds=111)),
        ('a: 112 millis', timedelta(milliseconds=112)),
        ('a: 113 milli', timedelta(milliseconds=113)),
        ('a: 114 ms', timedelta(milliseconds=114)),
        ('a: 110 nanoseconds', period(microseconds=0)),
        ('a: 11000 nanoseconds', period(microseconds=11)),
        ('a: 1110000 nanosecond', period(microseconds=1110)),
        ('a: 1120000 nanos', period(microseconds=1120)),
        ('a: 1130000 nano', period(microseconds=1130)),
        ('a: 1140000 ns', period(microseconds=1140)),
    ])
    def test_parse_string_with_duration(self, data_set):
        config = ConfigFactory.parse_string(data_set[0])
        assert config['a'] == data_set[1]

    def test_escape_sequences_json_equivalence(self):
        """
        Quoted strings are in the same format as JSON strings,
        See: https://github.com/lightbend/config/blob/master/HOCON.md#unchanged-from-json
        """
        source = r"""
        {
            "plain-backslash": "\\",
            "tab": "\t",
            "no-tab": "\\t",
            "newline": "\n",
            "no-newline": "\\n",
            "cr": "\r",
            "no-cr": "\\r",
            "windows": "c:\\temp"
        }
        """
        expected = {
            'plain-backslash': '\\',
            'tab': '\t',
            'no-tab': '\\t',
            'newline': '\n',
            'no-newline': '\\n',
            'cr': '\r',
            'no-cr': '\\r',
            'windows': 'c:\\temp',
        }
        config = ConfigFactory.parse_string(source)
        assert config == expected
        assert config == json.loads(source)
    def test_assign_int(self):
        config = ConfigFactory.parse_string(
            """
            short = 12
            long = 12321321837612378126213217321
            negative = -15
            """
        )
        # on python 3 long will be an int but on python 2 long will be a long
        assert config['short'] == 12
        assert isinstance(config['short'], int)
        assert config['long'] == 12321321837612378126213217321
        assert isinstance(config['negative'], int)
        assert config['negative'] == -15

    @mock.patch.dict(os.environ, TRUE_OR_FALSE='false')
    def test_bool_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            bool_from_env = ${TRUE_OR_FALSE}
            """)
        assert config == {
            'bool_from_env': 'false'
        }
        assert config.get_bool('bool_from_env') is False

    @mock.patch.dict(os.environ, INT_VAR='5')
    def test_int_from_environment(self):
        config = ConfigFactory.parse_string(
            """
            int_from_env = ${INT_VAR}
            """)
        assert config == {
            'int_from_env': '5'
        }
        assert config.get_int('int_from_env') == 5
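    # --- Illustrative sketch, not part of the original suite ---
    # Combining the environment tests above with optional substitution:
    # a hard-coded default plus an opt-in override via ${?VAR}. ENV_PORT
    # is a hypothetical variable name chosen for the example; as the
    # tests show, the raw value arrives as a string until cast.
    @mock.patch.dict(os.environ, ENV_PORT='8080')
    def example_env_override(self):
        config = ConfigFactory.parse_string(
            """
            port = 80
            port = ${?ENV_PORT}
            """)
        assert config['port'] == '8080'
        assert config.get_int('port') == 8080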
    def test_concat_string(self):
        config = ConfigFactory.parse_string(
            """
            a = a b c
            b = 5 b
            c = b 7
            """
        )
        assert config.get('a') == 'a b c'
        assert config.get('b') == '5 b'
        assert config.get('c') == 'b 7'

    def test_concat_list(self):
        config = ConfigFactory.parse_string(
            """
            a = [1, 2] [3, 4] [
                5,
                6
            ]
            """
        )
        assert config['a'] == [1, 2, 3, 4, 5, 6]
        assert config.get_list('a') == [1, 2, 3, 4, 5, 6]

    def test_bad_concat(self):
        ConfigFactory.parse_string('a = 45\n')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = [4] "4"')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = "4" [5]')
        with pytest.raises(ConfigWrongTypeException):
            ConfigFactory.parse_string('a = {b: 5} "4"')

    def test_assign_float(self):
        config = ConfigFactory.parse_string(
            """
            short = 12.12321
            long1 = 121.22E3423432
            neg_long1 = 121.22E-1
            long2 = 121.22e3423432
            neg_long2 = 121.22e-3
            """
        )
        assert config['short'] == 12.12321
        assert config['long1'] == 121.22E3423432
        assert config['neg_long1'] == 121.22E-1
        assert config['long2'] == 121.22E3423432
        assert config['neg_long2'] == 121.22e-3
    def test_one_line_quote_escape(self):
        config = ConfigFactory.parse_string(
            """
            test_no_quotes: abc\\n\\n
            test_quotes: "abc\\n\\n"
            """
        )
        assert config == {
            'test_no_quotes': 'abc\n\n',
            'test_quotes': 'abc\n\n'
        }

    def test_substitution_override(self):
        config = ConfigFactory.parse_string(
            """
            database {
                host = localhost
                port = 5432
                user = people
                name = peopledb
                pass = <PASSWORD>
            }

            user = test_user
            pass = <PASSWORD>

            database {
                user = ${user}
                pass = ${pass}
            }
            """)
        assert config['database.user'] == 'test_user'
        assert config['database.pass'] == '<PASSWORD>'

    def test_substitution_flat_override(self):
        config = ConfigFactory.parse_string(
            """
            database {
                name = peopledb
                pass = <PASSWORD>
                name = ${?NOT_EXISTS}
                pass = ${?NOT_EXISTS}
            }
            """)
        assert config['database.name'] == 'peopledb'
        assert config['database.pass'] == '<PASSWORD>'
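    # --- Illustrative sketch, not part of the original suite ---
    # Programmatic configs can be built with ConfigFactory.from_dict and
    # combined with parsed ones via ConfigTree.merge_configs; both APIs
    # appear elsewhere in this suite. The db host/port values are
    # invented for the example.
    def example_from_dict_merge(self):
        defaults = ConfigFactory.from_dict({'db': {'host': 'localhost', 'port': 5432}})
        overrides = ConfigFactory.parse_string('db { host = db.internal }')
        merged = ConfigTree.merge_configs(defaults, overrides)
        assert merged['db.host'] == 'db.internal'
        assert merged['db.port'] == 5432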
    def test_string_substitutions(self):
        config1 = ConfigFactory.parse_string(
            """
            {
                a: {
                    b: {
                        c = str
                        e = "str "
                    }
                }
                d = ${a.b.c}
                f = ${a.b.e}
            }
            """
        )
        assert config1.get('a.b.c') == 'str'
        assert config1.get('d') == 'str'
        assert config1.get('f') == 'str '

        config2 = ConfigFactory.parse_string(
            """
            {
                a: {
                    b: {
                        c = str
                        e = "str "
                    }
                }
                d = test ${a.b.c}
                f = test ${a.b.e}
            }
            """
        )
        assert config2.get('a.b.c') == 'str'
        assert config2.get('d') == 'test str'
        assert config2.get('f') == 'test str '

        config3 = ConfigFactory.parse_string(
            u"""
            {
                a: {
                    b: {
                        c = str
                        e = "str "
                    }
                }
                d = test ${a.b.c} me
                f = test ${a.b.e} me
            }
            """
        )
        assert config3.get('a.b.c') == 'str'
        assert config3.get('d') == 'test str me'
        assert config3.get('f') == 'test str  me'

    def test_string_substitutions_with_no_space(self):
        config = ConfigFactory.parse_string(
            """
            app.heap_size = 128
            app.java_opts = [
                -Xms${app.heap_size}m
                -Xmx${app.heap_size}m
            ]
            """
        )
        assert config.get('app.java_opts') == [
            '-Xms128m',
            '-Xmx128m'
        ]
    def test_self_append_array(self):
        config = ConfigFactory.parse_string(
            """
            x = [1,2]
            x += [3,4]
            """
        )
        assert config.get("x") == [1, 2, 3, 4]

    def test_self_append_string(self):
        '''
        Should be equivalent to
        x = abc
        x = ${?x} def
        '''
        config = ConfigFactory.parse_string(
            """
            x = abc
            x += def
            """
        )
        assert config.get("x") == "abc def"

    def test_self_append_non_existent_string(self):
        '''
        Should be equivalent to x = ${?x} def
        '''
        config = ConfigFactory.parse_string(
            """
            x += def
            """
        )
        assert config.get("x") == " def"

    def test_self_append_nonexistent_array(self):
        config = ConfigFactory.parse_string(
            """
            x += [1,2]
            """
        )
        assert config.get("x") == [1, 2]

    def test_self_append_object(self):
        config = ConfigFactory.parse_string(
            """
            x = {a: 1}
            x += {b: 2}
            """
        )
        assert config.get("x") == {'a': 1, 'b': 2}

    def test_self_append_nonexistent_object(self):
        config = ConfigFactory.parse_string(
            """
            x += {a: 1}
            """
        )
        assert config.get("x") == {'a': 1}

    def test_self_ref_substitution_dict(self):
        config = ConfigFactory.parse_string(
            """
            x = {a: 1, b: 2}
            x = ${x} {c: 3}
            x = {z: 0} ${x}
            x = {y: -1} ${x} {d: 4}
            """
        )
        assert config.get("x") == {'a': 1, 'b': 2, 'c': 3, 'z': 0, 'y': -1, 'd': 4}
config.get_list(u'www.example-ö.com.us.name') with pytest.raises(ConfigException): config.get_config(u'www.example-ö.com.us.name') with pytest.raises(ConfigWrongTypeException):", "'\\n\\\\\\\\\\n' assert config['with-newline-escape-sequence'] == '\\n\\\\n\\n' assert config['with-escaped-newline-escape-sequence'] == '\\n\\\\\\\\n\\n' def", "== 'test str me' assert config3.get('f') == 'test str me'", "config3 = ConfigFactory.parse_string(\"c=5\") config2 = ConfigFactory.parse_string(\"b=${c}\", resolve=False) config1 = ConfigFactory.parse_string(\"a=${b}\",", "[php, python] host_modules = [java] ${common_modules} \"\"\" ) assert config2.get('host_modules')", "y : ${x} }') fdin.flush() config = ConfigFactory.parse_string( \"\"\" {", "8] ] def test_invalid_assignment(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException):", "b c = b 7 \"\"\" ) assert config.get('a') ==", "def test_self_append_non_existent_string(self): ''' Should be equivalent to x = ${?x}", "x = 42 \"\"\".format(tmp_file=fdin.name) ) assert config['x'] == 42 assert", "\"plain-backslash\": \"\\\\\", \"tab\": \"\\t\", \"no-tab\": \"\\\\t\", \"newline\": \"\\n\", \"no-newline\": \"\\\\n\",", "\"\"\" b = [5, 6] a: [ ${b} [1, 2]", "ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size = 6 } data-center-east", "ConfigFactory.parse_string( \"\"\" b = [5, 6] a: [ ${b} [1,", "-15 \"\"\" ) # on python 3 long will be", "1, b: 2}, {a: 3, c: 4}, ] \"\"\" )", "d = OrderedDict() d['banana'] = 3 d['apple'] = 4 d['pear']", "None assert config.get_bool('t.g') is None assert config.get_list('t.g') is None assert", "def test_self_append_nonexistent_array(self): config = ConfigFactory.parse_string( \"\"\" x += [1,2] \"\"\"", "d : 2 } foo.d = 4 \"\"\" ) assert", "= [perl] host_modules = 55 ${common_modules} \"\"\" ) with pytest.raises(ConfigWrongTypeException):", "2, ] b = # test # test2 [ 3,", "9, opts = \"-Xmx4g\" } \"\"\" ) assert config3.get('data-center-east.cluster-size') ==", "\"/abc/cde2\": \"cde\" /abc/cde3: \"fgh\" \"\"\") assert 'abc' == config['/abc/cde1'] assert", ") assert config.get_string('a.b') == '5' @pytest.mark.parametrize('data_set', [ ('a: 1 minutes',", "= ${c} c = ${a} \"\"\") def test_assign_number_with_eol(self): config =", "STRING_VAR = ${?STRING_VAR} \"\"\") assert config == { 'STRING_VAR': 'value_from_environment'", "[ ${b} [1, 2] [3, 4] ${b} [1, 2] ${b}", "${b} dummy c = foo ${x} bv d = foo", "== [1, 2, 3, 4, 5, 6] assert config.get_list('a') ==", "'name': 'east', 'cluster-size': 6 } config6 = ConfigFactory.parse_string( \"\"\" data-center-generic", "== 'str' assert config2.get('d') == 'test str' assert config2.get('f') ==", "[3,4]} x = {y: [5,6]} x = {z: ${x}} \"\"\"", "pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} {y: 1} x =", "= ConfigFactory.parse_string( \"\"\" short = 12 long = 12321321837612378126213217321 negative", "} \"\"\" ) assert config['a']['x'] == 42 assert config['a']['y'] ==", "= { 'a': 1, 'b': 2, 'c': 3, 'd': 4", "\"\"\" ) assert config.get('languages') == ['java', 'php'] def test_substitution_list_with_append(self): config", "7' def test_concat_list(self): config = ConfigFactory.parse_string( \"\"\" a = [1,", "x : 42 } } \"\"\" ) assert config['a']['x'] ==", "${sub} \"\"\" ) assert config.get_list('base.bar') == [\"a\"] assert config.get_list('sub.baz') ==", "3 retries' } def test_substitution_cycle(self): with pytest.raises(ConfigSubstitutionException): 
ConfigFactory.parse_string( \"\"\" a", "433 } } \"\"\" ) assert config2['database']['host'] == 'other.host.net' assert", "assert config4.get('data-center-east-prod.tmpDir') == '/tmp' config5 = ConfigFactory.parse_string( \"\"\" data-center-generic =", "config.get_int('o3.foo.a') == 1 assert config.get_int('o3.foo.c') == 4 def test_issue_75(self): config", "} c: { } d: { pc: ${b.pa} } e:", ") assert config.get(\"x\") == {'a': 1, 'b': 2} def test_self_append_nonexistent_object(self):", "== 6 def test_assign_int(self): config = ConfigFactory.parse_string( \"\"\" short =", "[1,2] x = {x: [3,4]} x = {y: [5,6]} x", "${a} d: ${a} ${b} d: ${a} bar \"\"\") assert config['c']", "None assert config.get_float('t.g') is None assert config.get_string('t.g') is None assert", "= 5 \"d\" = true e.y = { f: 7", "\"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = abc ${non_existent}", "${a.b} a.c = [1,2] a.c = ${a.c} a.d = {foo:", "config = ConfigFactory.parse_string( \"\"\"t = { c = 5 \"d\"", "def test_parse_string_with_duration(self, data_set): config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1]", "} \"\"\" ) assert config2.get('a.b.c') == 'str' assert config2.get('d') ==", "def test_self_merge_ref_substitutions_object2(self): config1 = ConfigFactory.parse_string( \"\"\" x : { v1:", "config.get(\"x.x\") == [3, 4] assert config.get(\"x.y\") == [5, 6] assert", "= # test # test2 5 c = 6 \"\"\"", ") assert config['b'] == ['a', 1, period(weeks=10), period(minutes=5)] def test_parse_with_enclosing_square_bracket(self):", "in config assert config['d'] == 4 assert config['e'] == 45", "config_tree.pop('a.b', 5) assert 5 == config_tree.pop('a.c', 5) expected = {", "a-b-c-d=test, a b c d=test2, \"a b c d e\"=test3", "# if both are ConfigTree config_tree = ConfigFactory.parse_string(\"\"\" foo: ${bar}", "\"\"\" ) config5 = ConfigFactory.parse_string( u\"\"\" longName: \"long \"${?name} \"\"\",", ": ${base} { baz: ${base.bar} [\"b\"] } sub2: ${sub} \"\"\"", "assert config.get('a.b') == 'test' assert config.get_string('a.b') == 'test' assert config.get('t')", "ConfigFactory.parse_string( \"\"\" a = null b = [null] \"\"\" )", "with open(module_conf, 'w') as fdin: fdin.write(\"{c: 3}\") # add the", "config = ConfigFactory.parse_string( \"\"\" foo : { a : {", "5 g { h.d: 4 } g.h.k: f d }", "that the contents of both config files are available assert", "[1] def test_self_merge_ref_substitutions_object2(self): config1 = ConfigFactory.parse_string( \"\"\" x : {", "a : { include \"\"\" + '\"' + fdin.name +", "= ConfigFactory.parse_string( \"\"\" test_no_quotes: abc\\\\n\\\\n test_quotes: \"abc\\\\n\\\\n\" \"\"\" ) assert", "retries retries_msg = ${?CUSTOM_MSG} \"\"\") assert config == { 'num':", "expected = { 'a': { 'garfield': { 'say': 'meow' },", "test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string( \"\"\" b = [5, 6] a:", "assert config['neg_long1'] == 121.22E-1 assert config['long2'] == 121.22E3423432 assert config['neg_long2']", "config['c'] == ['e', 'f'] def test_assign_dict_strings_with_equal_sign_with_eol(self): config = ConfigFactory.parse_string( \"\"\"", "\"2\" # DO NOT CHANGE ANY OF THE ABOVE SETTINGS!\"\"\")", "'east' config2 = ConfigFactory.parse_string( \"\"\" data-center-generic = { cluster-size =", "def\" def test_self_append_non_existent_string(self): ''' Should be equivalent to x =", "ConfigFactory.parse_string( \"\"\" common_modules = [perl] host_modules = aa ${common_modules} \"\"\"", "= test a = 
foo \"bar\" ${b} dummy c =", "if x.strip(' ') != ''] == ['perl', 'java', 'python'] def", "test_mutation_values(self): config = ConfigFactory.parse_string( \"\"\" common : { } b1", "\"\"\" a: [ include url(\"file://{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name) ) assert config3['a']", "test_triple_quotes_same_line(self): config_tree = ConfigFactory.parse_string('a:[\"\"\"foo\"\"\"\", \"bar\"]') assert config_tree == { 'a':", "\"\"\" a = [1, 2] [3, 4] [ 5, 6", "121.22e-3 \"\"\" ) # on python 3 long will be", "'b': 1, 'c': 2, 'd': 3 } config4 = ConfigFactory.parse_string(", "INT_VAR='5') def test_int_from_environment(self): config = ConfigFactory.parse_string( \"\"\" int_from_env = ${INT_VAR}", "e = \"str \" } } d = test ${a.b.c}", "host_modules = aa ${common_modules} bb \"\"\" ) def test_self_ref_substitution_array(self): config", "to x = ${?x} def ''' config = ConfigFactory.parse_string( \"\"\"", "ConfigFactory.parse_string( \"\"\" A = ${Test} Test { field1 = 1", ") assert config2.get('a.b.c') == 'str' assert config2.get('d') == 'test str'", "${Test.field2}\"3\" } \"\"\" ) assert config.get_string(\"A.field1\") == \"1\" assert config.get_string(\"A.field2\")", "opts = \"-Xmx4g\" } \"\"\" ) assert config3.get('data-center-east.cluster-size') == 9", "config2 = ConfigFactory.parse_string( \"\"\" a: [ include file(\"{tmp_file}\") ] \"\"\".format(tmp_file=fdin.name)", "def test_parse_URL_from_samples(self): config = ConfigFactory.parse_URL(\"file:samples/aws.conf\") assert config.get('data-center-generic.cluster-size') == 6 assert", "ConfigFactory.parse_string('common_modules [perl]') with pytest.raises(ParseException): ConfigFactory.parse_string('common_modules {} {perl: 1}') with pytest.raises(ParseSyntaxException):", "\"\"\" common_modules = abc ${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string(", "'-XX:+UseParNewGC' ] def test_non_existent_substitution(self): with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules =", "433 assert config2['database']['url'] == 'other.host.net:433' def test_fallback_substitutions_overwrite(self): config1 = ConfigFactory.parse_string(", "= ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) b = merged.get(\"b\") assert len(b) ==", "test_include_substitution2(self): with tempfile.NamedTemporaryFile('w') as fdin: fdin.write('{ x : 10, y", "config = ConfigFactory.parse_string( \"\"\" a = [1, 2] [3, 4]", "0, 1, 2, 3, 4, 5, 6] def test_self_append_array(self): config", "config = ConfigFactory.parse_file(\"samples/animals.conf\") assert config.get('cat.garfield.say') == 'meow' assert config.get('dog.mutt.hates.garfield.say') ==", "config['a'] == 'a' assert config['b'] == 'b' assert config['c'] ==", "def test_parse_string_with_duration_with_long_unit_name(self): config = ConfigFactory.parse_string( \"\"\" a: foo b: 10", "1, b: 2}, {a: 3, c: 4} ${b}, {a: 3}", "config['b'] == -121.22 assert config['c'] == .54 assert config['d'] ==", "{ \"c\": 5 } } k { \"b.f.d\": 7 }", "'test_no_quotes': 'abc\\n\\n', 'test_quotes': 'abc\\n\\n' } def test_multi_line_escape(self): config = ConfigFactory.parse_string(", "== 5 assert config['c'] == 6 def test_assign_int(self): config =", "\"\"\" ) assert config.get('a.b') == 1 assert config.get('a.c') == 2", "'d': 3 } config4 = ConfigFactory.parse_string( \"\"\" name: foo \"\"\"", "config = ConfigFactory.parse_string(data_set[0]) assert config['a'] == data_set[1] except Exception: pass", "aa ${common_modules} 
\"\"\" ) with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules =", "== 'test3' def test_dict_merge(self): config = ConfigFactory.parse_string( \"\"\" a {", "= ConfigFactory.parse_string( \"\"\" database { host = localhost port =", ") config2 = ConfigFactory.parse_string( \"\"\" b1 : { v1: 2,", "6}') assert 3 == config_tree.pop('a.b', 5) assert 5 == config_tree.pop('a.c',", "assert config3.get('data-center-east.name') == 'east' assert config3.get('data-center-east.opts') == '-Xmx4g' config4 =", ") def test_self_ref_substitution_array(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2]", "\"\"\" a = foo bar \"\"\") assert config == {", ": ${foo.d}, b : 1 } bar.b = 3 //", "'peopledb' assert config['database.pass'] == '<PASSWORD>' def test_substitution_multiple_override(self): config = ConfigFactory.parse_string(", "} database { host = ${?DB_HOST} } database { host", "= ConfigFactory.parse_string( \"\"\" num = 3 retries_msg = You have", "== { 'data-center-generic': {'cluster-size': 8}, 'data-center-east': {'cluster-size': 8, 'name': 'east'},", "3 assert config.get_int('o3.foo.b') == 3 assert config.get_int('o1.foo.c', default=42) == 42", "= {a: 1} x += {b: 2} \"\"\" ) assert", "test_self_append_string(self): ''' Should be equivalent to x = abc x", "pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent} \"\"\" ) with pytest.raises(ConfigSubstitutionException):", "relativedelta(months=1)), ('a: 1months', relativedelta(months=1)), ('a: 2 month', relativedelta(months=2)), ('a: 3", "e = 7 } \"\"\" ) assert config.get('a.b.c') == 7", "= { x: 1 } \"\"\" ) config2 = ConfigFactory.parse_string(", "= ConfigFactory.parse_string('a:[\"\"\"foo\"\"\"\", \"bar\"]') assert config_tree == { 'a': ['foo\"', \"bar\"]", "8 3, # comment 9 ] } # comment 10", "= 5\") assert config['foo'] == 5 def test_dos_chars_with_float_noeol(self): config =", "= # test # test2 { c: 3, d: 4,}", "d def test_from_dict_with_ordered_dict(self): d = OrderedDict() d['banana'] = 3 d['apple']", "6] def test_fallback_self_ref_substitutions_append_plus_equals(self): config1 = ConfigFactory.parse_string( \"\"\" list = [", "test2 { c: 3, d: 4,} c = { e:", "= a b c b = 5 b c =", "42, baz : ${bar.foo} } bar : { foo :", "https://github.com/lightbend/config/blob/master/HOCON.md#unchanged-from-json \"\"\" source = r\"\"\" { \"plain-backslash\": \"\\\\\", \"tab\": \"\\t\",", "= 5 b = test a = foo \"bar\" ${b}", "test_self_ref_substitution_dict_otherfield_merged_in_mutual(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string(", "= people name = peopledb pass = <PASSWORD> } user=test_user", "== 42 assert config['y'] == 42 @pytest.mark.xfail def test_include_substitution2(self): with", "[4] \"4\"') with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string('a = \"4\" [5]') with pytest.raises(ConfigWrongTypeException):", "12.12321 long1 = 121.22E3423432 neg_long1 = 121.22E-1 long2 = 121.22e3423432", "user=test_user pass=<PASSWORD> database { user = ${user} pass = ${pass}", "available assert dict(config.as_plain_ordered_dict()) == {'a': 1, 'b': 2, 'c': 3}", "${x} {c: 3} x = {z: 0} ${x} x =", "config = ConfigFactory.parse_string(\"foo = 5\") assert config['foo'] == 5 def", "host_modules = [java] ${common_modules} \"\"\" ) assert config2.get('host_modules') == ['java',", "period(microseconds=114)), ('a: 110 milliseconds', timedelta(milliseconds=110)), ('a: 111 millisecond', timedelta(milliseconds=111)), ('a:", "= { 
foo : { b : 3 c :", "abc\\\\\"test \"\"\") assert 'abc\"test' == config['quoted'] assert 'abc\"test' == config['unquoted']", "pass = ${pass} } \"\"\") assert config['database.user'] == 'test_user' assert", "${base.bar} [\"b\"] } sub2: ${sub} \"\"\" ) assert config.get_list('base.bar') ==", "('a: 112 micros', period(microseconds=112)), ('a: 113 micro', period(microseconds=113)), ('a: 114", "me e = 7 } \"\"\" ) assert config.get('a.b.c') ==", "${b} b = ${c} c = ${a} \"\"\") def test_assign_number_with_eol(self):", "== 5 assert config1.get('d') == 5 config2 = ConfigFactory.parse_string( \"\"\"", "with pytest.raises(IOError): ConfigFactory.parse_string( \"\"\" a: [ include required(\"dummy.txt\") 3 4", "\"e\": 65 } } } } assert expected_result == config", "= [\"-XX:+UseParNewGC\"] [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] [\"-XX:+UseParNewGC\"] \"\"\")", "assert config.get('app.java_opts') == [ '-Xms128m', '-Xmx128m' ] def test_int_substitutions(self): config1", "bar : { foo : 42, baz : ${bar.foo} }", "${b} {c: 6}, ] \"\"\" ) assert config['a'] == [", "\"\"\") assert config[\"application.large-jvm-opts\"] == [ '-XX:+UseParNewGC', '-Xm16g', '128mm' ] assert", "'a': 'foo bar test dummy', 'c': 'foo 5 bv', 'd':", "\"\"\") assert config == { 'string_from_env': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment')", "ConfigFactory.parse_string( \"\"\" application.foo = 128mm application.large-jvm-opts = [\"-XX:+UseParNewGC\"] [-Xm16g, ${application.foo}]", "def test_optional_substitution(self): config = ConfigFactory.parse_string( \"\"\" a = 45 b", "foo b: 10 weeks c: bar \"\"\" ) assert config['b']", "bv', 'd': 'foo 5 43' } def test_complex_substitutions(self): config =", "\"hey man{}\".format(forbidden_char) config = ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert config.get_string(\"a\") == value", "def test_cascade_optional_substitution(self): config = ConfigFactory.parse_string( \"\"\" num = 3 retries_msg", "\"\"\" x = {a: 1, b: 2} x = ${x}", "monkeypatch): temp_dir = tempfile.mkdtemp() try: module_dir = os.path.join(temp_dir, 'my_module') module_conf", "with pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" x = ${x} x = ${x}", "${Test} Test { field1 = 1 field2 = ${Test.field1}\"2\" field3", "= ConfigFactory.parse_string(input_string) assert config.get_string(u'www.sample.com.us.name') == 'first domain' assert config.get_string(u'www.example-ö.com.us.name') ==", "test' assert config.get('a.b') == 'test' assert config.get_string('a.b') == 'test' assert", "${c} c = ${a} \"\"\") def test_assign_number_with_eol(self): config = ConfigFactory.parse_string(", "folder and necessary files (__init__ and config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'),", "= str e = \"str \" } } d =", "{b:java} \\ {c:python} \"\"\" ) assert config['common_modules'] == {'a': 'perl',", "pytest.raises(ParseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char', ['+', '`', '^', '?', '!',", "${a.b.e} me } \"\"\" ) assert config3.get('a.b.c') == 'str' assert", "3]\") assert config == [1, 2, 3] def test_quoted_key_with_dots(self): config", "config2.with_fallback(config1) assert config2.get(\"list\") == [1, 2, 3, 4, 5, 6]", "5 c = 6 \"\"\" ) assert config['a'] == 4", "config2.with_fallback(config1) assert result.get(\"string\") == 'abcdef' # test no mutation on", "2] [3, 4] [ 5, 6 ] \"\"\" ) assert", "['a', 'b'] 
assert config['b'] == ['c', 'd'] assert config['c'] ==", "value = \"hey man{}\".format(forbidden_char) config = ConfigFactory.parse_string('a: \"{}\"'.format(value)) assert config.get_string(\"a\")", "test2 { c: 3, d: 4,} c { e: 5,", "def test_self_ref_substitution_dict_path(self): config = ConfigFactory.parse_string( \"\"\" x = {y: {z:", "6 } def test_dos_chars_with_unquoted_string_noeol(self): config = ConfigFactory.parse_string(\"foo = bar\") assert", "'f': 4} ] def test_list_of_lists_with_merge(self): config = ConfigFactory.parse_string( \"\"\" b", "} def test_from_dict_with_dict(self): d = { 'banana': 3, 'apple': 4,", "true e.y = { f: 7 g: \"hey dude!\" h:", "4, 5, 6] def test_self_append_array(self): config = ConfigFactory.parse_string( \"\"\" x", "// comment 12 \"\"\" ) assert config.get('c') == 'test' assert", "config1 = ConfigFactory.parse_string( \"\"\" { a: { b: { c", "'5' } assert config.get_int('int_from_env') == 5 def test_unicode_dict_key(self): input_string =", "period(microseconds=11)), ('a: 1110000 nanosecond', period(microseconds=1110)), ('a: 1120000 nanos', period(microseconds=1120)), ('a:", "\"first domain\" } } www.example-ö.com { us { name =", "config2.get('d') == 'test str' assert config2.get('f') == 'test str '", "config = ConfigFactory.parse_string( \"\"\" x = 5 b = test", "'55', 'c': '5 5' } def test_dict_substitutions(self): config = ConfigFactory.parse_string(", "ConfigFactory.parse_string('a:[\"\"\"foo\"\"\"\", \"bar\"]') assert config_tree == { 'a': ['foo\"', \"bar\"] }", ") (one, two, three) = config.get(\"x\") assert one == {'x':", "1, b: 2}') fdin.flush() config1 = ConfigFactory.parse_string( \"\"\" a: {{", "6 } data-center-east = ${data-center-generic} {name = \"east\"} \"\"\" )", "{'a': 'perl', 'b': 'java', 'c': 'python'} def test_parse_URL_from_samples(self): config =", "'test str' assert config2.get('f') == 'test str ' config3 =", "test_self_ref_substitution_string_opt_concat(self): ''' Example from HOCON spec ''' config = ConfigFactory.parse_string(", "'python'] def test_concat_multi_line_dict(self): config = ConfigFactory.parse_string( \"\"\" common_modules = {a:perl}", "data-center-east = ${data-center-generic} data-center-east = { name = \"east\" }", "a: [ ${b} {a: 1, b: 2}, {a: 3, c:", "config3.get('data-center-east.opts') == '-Xmx4g' config4 = ConfigFactory.parse_string( \"\"\" data-center-generic = {", "assert config2.get('data-center-east.name') == 'east' config3 = ConfigFactory.parse_string( \"\"\" data-center-generic =", "${common_modules} [perl] full_modules = ${host_modules} [c, go] \"\"\" ) assert", "== 433 assert config2['database']['url'] == 'other.host.net:433' def test_fallback_substitutions_overwrite(self): config1 =", "} sub : ${base} { baz: ${base.bar} [\"b\"] } sub2:", "a = { b: 1 c: 2 } \"\"\" )", "1 b : 2 } } o2 = { foo", "assert 'milk' in types def test_list_of_dicts(self): config = ConfigFactory.parse_string( \"\"\"", "test_fail_parse_forbidden_characters(self, forbidden_char): with pytest.raises(ParseBaseException): ConfigFactory.parse_string('a: hey man{}'.format(forbidden_char)) @pytest.mark.parametrize('forbidden_char', ['$', '\"'])", "= ${data-center-east} {tmpDir=/tmp} \"\"\" ) assert config4.get('data-center-east.cluster-size') == 6 assert", "= ${x} [3,4] x = [-1, 0] ${x} [5, 6]", "\"\"\" a: [ {a: 1, b: 2}, {a: 3, c:", "= { 'banana': 3, 'apple': 4, 'pear': 1, 'orange': 2,", "assert merged.get(\"b1\") == {\"v1\": 2, \"v2\": 3} b = merged.get(\"b\")", "\"\"\" value: 
\"{\\\\\"critical\\\\\":\\\\\"0.00\\\\\",\\\\\"warning\\\\\":\\\\\"99.99\\\\\"}\" \"\"\" ) assert '{\"critical\":\"0.00\",\"warning\":\"99.99\"}' == config['value'] def", "config['common_modules'] == {'a': 'perl', 'b': 'java', 'c': 'python'} def test_parse_URL_from_samples(self):", "5, 'f': 6} def test_assign_dict_strings_no_equal_sign_with_eol(self): config = ConfigFactory.parse_string( \"\"\" a", "5 } } a.b { c = 7 d =", "\"\"\" { a : { include \"\"\" + '\"' +", "pytest.raises(ConfigSubstitutionException): ConfigFactory.parse_string( \"\"\" common_modules = ${non_existent} abc \"\"\" ) with", "nanos', period(microseconds=1120)), ('a: 1130000 nano', period(microseconds=1130)), ('a: 1140000 ns', period(microseconds=1140)),", "= ${host_modules} [c, go] \"\"\" ) assert config4.get('common_modules') == ['php',", "{ \"i\": { \"m\": 7, \"d\": 5, \"e\": 65 }", "'a': { 'garfield': { 'say': 'meow' }, 't': 2 }", "\"a.b.c.d\": 3 t { \"d\": { \"c\": 5 } }", "'c': 2, 'd': 3 } config4 = ConfigFactory.parse_string( \"\"\" name:", "assert result.get(\"string\") == 'abcdef' # test no mutation on config1", "43, 'baz': 43} assert set(config.keys()) == set(['bar']) def test_self_ref_substitution_dict_otherfield_merged_in_mutual(self): '''", "config = ConfigFactory.parse_string( \"\"\" database { name = peopledb pass", "assert config.get(\"x.z\") == {'x': [3, 4], 'y': [5, 6]} def", ") config6 = config4.with_fallback(config5) assert config6 == { 'longName': 'long", "test2 5 c = 6 \"\"\" ) assert config['a'] ==", "''' Should be equivalent to x = ${?x} def '''", "cluster-size = 6 } data-center-east = ${data-center-generic} data-center-east = {", "['perl', 'java', 'python'] def test_concat_multi_line_list(self): config = ConfigFactory.parse_string( \"\"\" common_modules", "{a: 1, b: 2}, {a: 3, c: 4} ${b}, {a:", "'foo 5 bv', 'd': 'foo 5 43' } def test_complex_substitutions(self):", "assert config.get(\"foo\") == {'c': 3, 'd': 4} assert set(config.keys()) ==", "== 4 def test_concat_string(self): config = ConfigFactory.parse_string( \"\"\" a =", "{ database { host = \"localhost\" port = 8000 url", "{ c = 5 \"d\" = true e.y = {", "dude!\" h: hey man i = \\\"\\\"\\\" \"first line\" \"second\"", "= You have ${num} retries retries_msg = ${?CUSTOM_MSG} \"\"\") assert", "[-3, -2] ${x} \"\"\" ) assert config.get(\"x\") == [-3, -2,", "8}, 'data-center-east': {'cluster-size': 8, 'name': 'east'}, 'misc': 'mist', 'default-jvm-opts': ['-XX:+UseParNewGC'],", "= ${TRUE_OR_FALSE} \"\"\") assert config == { 'bool_from_env': 'false' }", "== 7 assert config.get('t.e.y.g') == 'hey dude!' 
assert config.get('t.e.y.h') ==", "config = ConfigFactory.parse_string( \"\"\" a { a: 1, b: 2,", "= 6 test # comment 0 # comment 3 a:", "test_self_ref_substitution_array(self): config = ConfigFactory.parse_string( \"\"\" x = [1,2] x =", "{ } \"\"\", resolve=False ) merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged)", "= config4.with_fallback(config5) assert config6 == { 'longName': 'long foo', 'name':", ": ${a} { } \"\"\", resolve=False ) merged = ConfigTree.merge_configs(config1,", "[\"-XX:+UseParNewGC\"] [-Xm16g, ${application.foo}] application.large-jvm-opts2 = [-Xm16g, ${application.foo}] [\"-XX:+UseParNewGC\"] \"\"\") assert", "test # test2 \"b\" c = \"c\" \"\"\" ) assert", "{ 'banana': 3, 'apple': 4, 'pear': 1, 'orange': 2, }", "test_assign_int(self): config = ConfigFactory.parse_string( \"\"\" short = 12 long =", "c d e') == 'test3' def test_dict_merge(self): config = ConfigFactory.parse_string(", "config) os.mkdir(module_dir) open(os.path.join(module_dir, '__init__.py'), 'a').close() with open(module_conf, 'w') as fdin:", "assert config3.get('d') == 'test str me' assert config3.get('f') == 'test", "} \"\"\" ) assert config2['database']['host'] == 'other.host.net' assert config2['database']['port'] ==", "test_escape_quote_complex(self): config = ConfigFactory.parse_string( \"\"\" value: \"{\\\\\"critical\\\\\":\\\\\"0.00\\\\\",\\\\\"warning\\\\\":\\\\\"99.99\\\\\"}\" \"\"\" ) assert", "config = ConfigFactory.parse_string( \"\"\" // comment 1 # comment 2", "= <PASSWORD> name = ${?NOT_EXISTS} pass = ${?NOT_EXISTS} } \"\"\")", "('a: 111 millisecond', timedelta(milliseconds=111)), ('a: 112 millis', timedelta(milliseconds=112)), ('a: 113", "config.get(\"a\") == {'b': 3, 'c': [1, 2], 'd': {'foo': 'bar'}}", "4}, {'a': 3, 'c': 4, 'f': 4}, {'a': 3, 'c':", "test_string_from_environment(self): config = ConfigFactory.parse_string( \"\"\" string_from_env = ${STRING_VAR} \"\"\") assert", "\"\"\") assert 'r' in config_tree['foo'] and 't' in config_tree['foo'] and", "def test_substitution_nested_override(self): config = ConfigFactory.parse_string( \"\"\" database { name =", "bar' } def test_quoted_unquoted_strings_with_ws(self): config = ConfigFactory.parse_string( \"\"\" a =", "'c': 5} == config1 def test_optional_substitution(self): config = ConfigFactory.parse_string( \"\"\"", "relativedelta(years=3)), ('a: 3y', relativedelta(years=3)), ]) def test_parse_string_with_duration_optional_units(data_set): config = ConfigFactory.parse_string(data_set[0])", "an int but on python 2 long with be a", "${a.b.e} } \"\"\" ) assert config2.get('a.b.c') == 'str' assert config2.get('d')", "def \"\"\" ) def test_non_compatible_substitution(self): with pytest.raises(ConfigWrongTypeException): ConfigFactory.parse_string( \"\"\" common_modules", "7, 8] ] def test_invalid_assignment(self): with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('common_modules [perl]') with", "assert config['long1'] == 121.22E3423432 assert config['neg_long1'] == 121.22E-1 assert config['long2']", "'right' assert config.get(\"b2\")[1]['VAR'] == 'right' def test_escape_sequences_json_equivalence(self): \"\"\" Quoted strings", "config = ConfigFactory.parse_string( \"\"\" b = [5, 6] a: [", "except Exception: from datetime import timedelta as period class TestConfigParser(object):", "2 long with be a long assert config['short'] == 12.12321", "{ b: { c = ${e} } } d =", "4 b: test, # comment 5 } # comment 6", "three == 3 def test_self_ref_substitution_dict_path(self): config = 
ConfigFactory.parse_string( \"\"\" x", "= foo \"bar\" ${b} dummy c = foo ${x} bv", "\"\"\" ) assert config5['data-center-east'] == { 'name': 'east', 'cluster-size': 6", "config = ConfigFactory.parse_string( \"\"\" a = // abc abc c", "'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, TRUE_OR_FALSE='false') def test_bool_from_environment(self): config = ConfigFactory.parse_string(", "= ConfigFactory.parse_string( \"\"\" list = [ 1, 2, 3 ]", "{ v1: 2, v2: 3 } \"\"\", resolve=False ) merged", "datetime import timedelta from pyparsing import ParseBaseException, ParseException, ParseSyntaxException import", "[perl] \"\"\" ) assert config3.get('common_modules') == ['php', 'python'] assert config3.get('host_modules')", "config.get('b.d') == 4 def test_concat_string(self): config = ConfigFactory.parse_string( \"\"\" a", "= ConfigFactory.parse_string( \"\"\" app.heap_size = 128 app.java_opts = [ -Xms${app.heap_size}m", "config.get('a') is None assert config.get('b')[0] is None def test_parse_override(self): config", "('a: 1110000 nanosecond', period(microseconds=1110)), ('a: 1120000 nanos', period(microseconds=1120)), ('a: 1130000", "pass = <PASSWORD> } database { name = ${?user} pass", "12 w', period(weeks=12)), ('a: 10 days', period(days=10)), ('a: 11 day',", "== config['value'] def test_keys_with_slash(self): config = ConfigFactory.parse_string( \"\"\" /abc/cde1: abc", "5 } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment(self): config = ConfigFactory.parse_string( \"\"\"", "== { 'STRING_VAR': 'value_from_environment' } @mock.patch.dict(os.environ, STRING_VAR='value_from_environment') def test_string_from_environment_self_ref_optional(self): config", "[-Xm16g, ${application.foo}] ${application.default-jvm-opts} \"\"\") assert config[\"application.large-jvm-opts\"] == [ '-XX:+UseParNewGC', '-Xm16g',", "assert config['h'] == 1 def test_cascade_optional_substitution(self): config = ConfigFactory.parse_string( \"\"\"", "['foo\"', \"bar\"] } def test_pop(self): config_tree = ConfigFactory.parse_string('a:{b: 3, d:", "config['e'] == 45 assert 'g' not in config assert config['h']", "{c: 2} b: {c: 3} {d: 4} { c: 5", "test_substitution_flat_override(self): config = ConfigFactory.parse_string( \"\"\" database { name = peopledb", "\"\"\" a: 1 b: ${c} { pa: [${a}] pb: ${b.pa}", ": { v1: 1 } b = [${b1}] \"\"\", resolve=False", "== 10 names = {bar['name'] for bar in bars} types", "config2.get('host_modules') == ['java', 'php', 'python'] config3 = ConfigFactory.parse_string( \"\"\" common_modules", "== [ [1, 2], [3, 4] ] def test_list_of_dicts_with_merge(self): config", "b: 2, } b = # test # test2 {", "e: 5, f: 6 } \"\"\" ) assert config['a'] ==", "bar} print(types, '(((((') assert '<NAME>' in names assert 'Homer\\'s favorite", "= \"foo\" \"bar \"{ws}// comment \"\"\".format(ws=' ')) assert config ==", "43 } \"\"\" ) assert config.get(\"bar\") == {'foo': 43, 'baz':", "= 5 } } d = test ${a.b.c} me }", "2], [3, 4, 5, 6], [1, 2, 5, 6, 7,", ") def test_resolve_package_path(self): path = ConfigParser.resolve_package_path(\"pyhocon:config_parser.py\") assert os.path.exists(path) def test_resolve_package_path_format(self):", "sec'), ('a: 7 hours', period(hours=7)), ('a: 8 hour', period(hours=8)), ('a:", "test_dos_chars_with_quoted_string_noeol(self): config = ConfigFactory.parse_string('foo = \"5\"') assert config['foo'] == '5'", "d['tree'] = { 'a': 'abc\\ntest\\n', 'b': [1, 2, 3] }", "{ name = \"second domain\" } } \"\"\" config =", ") assert 
config2.get('host_modules') == ['java', 'php', 'python'] config3 = ConfigFactory.parse_string(", "{'v2': 2} assert b[1] == {'v1': 1, 'v2': 3} def", "# test no mutation on config1 assert result is not", "[3, 4], 'y': [5, 6]} def test_self_ref_substitiotion_dict_in_array(self): config = ConfigFactory.parse_string(", "config_tree = ConfigFactory.parse_string('a:{b: 3, d: 6}') assert 3 == config_tree.pop('a.b',", "\"\"\" foo : { a : { c : 1", "== d def test_from_dict_with_ordered_dict(self): d = OrderedDict() d['banana'] = 3", "5, 6] def test_fallback_self_ref_substitutions_append_plus_equals(self): config1 = ConfigFactory.parse_string( \"\"\" list =", "{c: 6}, ] \"\"\" ) assert config['a'] == [ {'a':", "a = 5 \"\"\" ) # b is not set", "= { f: 5 g } \"\"\") with pytest.raises(ParseSyntaxException): ConfigFactory.parse_string('a", ") merged = ConfigTree.merge_configs(config1, config2) ConfigParser.resolve_substitutions(merged) assert merged.get(\"c.d\") == [1]", "[3,4]} x = [${x}, 2, 3] \"\"\" ) (one, two,", "b: ${c} { pa: [${a}] pb: ${b.pa} } c: {", "bar \"\"\") assert config == { 'a': 'foo bar' }", "{name = \"east\"} ${data-center-generic} \"\"\" ) assert config2.get('data-center-east.cluster-size') == 6", "baz: ${base.bar} [\"b\"] } sub2: ${sub} \"\"\" ) assert config.get_list('base.bar')", "assert config1['a'] == expected_res config2 = ConfigFactory.parse_string( \"\"\" a: {{", ") assert config.get('foo') == {'a': 2, 'c': 1} assert set(config.keys())" ]
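To make the surviving themes concrete, here is a minimal sketch of the parse/substitute/fallback behaviour those tests exercise, assuming pyhocon is installed. The keys and values used here (database.host, DB_USER, other.host.net) are illustrative stand-ins, not strings recovered from the original suite.

from pyhocon import ConfigFactory

# ${path} splices in another key's value; ${?path} is optional and
# simply drops the key if the reference (config key or env var) is unset.
config = ConfigFactory.parse_string(
    """
    database {
        host = localhost
        port = 8000
        url = ${database.host}":"${database.port}
    }
    user = ${?DB_USER}   # absent unless the DB_USER env var is set
    """
)
assert config.get_string("database.url") == "localhost:8000"
assert config.get_int("database.port") == 8000

# with_fallback() merging: keys present on the caller win,
# everything missing is filled in from the fallback tree.
override = ConfigFactory.parse_string("database.host = other.host.net")
merged = override.with_fallback(config)
assert merged.get_string("database.host") == "other.host.net"
assert merged.get_int("database.port") == 8000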
[The remainder of the span was the same kind of shuffled fragment dump, but its fragments cover a single module nearly in full: the ScenarioManager from CARLA's scenario_runner (module path inferred from its own imports). Reassembled below, with typos in the docstrings corrected.]

#!/usr/bin/env python

# Copyright (c) 2018-2020 Intel Corporation
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.

"""
This module provides the ScenarioManager implementation.
It must not be modified and is for reference only!
"""

from __future__ import print_function

import sys
import time

import py_trees

from srunner.autoagents.agent_wrapper import AgentWrapper
from srunner.scenariomanager.carla_data_provider import CarlaDataProvider
from srunner.scenariomanager.result_writer import ResultOutputProvider
from srunner.scenariomanager.timer import GameTime
from srunner.scenariomanager.watchdog import Watchdog


class ScenarioManager(object):

    """
    Basic scenario manager class. This class holds all functionality
    required to start and analyze a scenario.

    The user must not modify this class.

    To use the ScenarioManager:
    1. Create an object via manager = ScenarioManager()
    2. Load a scenario via manager.load_scenario()
    3. Trigger the execution of the scenario manager.run_scenario()
       This function is designed to explicitly control start and end of
       the scenario execution
    4. Trigger a result evaluation with manager.analyze_scenario()
    5. If needed, cleanup with manager.stop_scenario()
    """

    def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0):
        """
        Sets up the parameters, which will be filled at load_scenario()
        """
        self.scenario = None
        self.scenario_tree = None
        self.scenario_class = None
        self.ego_vehicles = None
        self.other_actors = None

        self._debug_mode = debug_mode
        self._agent = None
        self._sync_mode = sync_mode
        self._running = False
        self._timestamp_last_run = 0.0
        self._timeout = timeout
        self._watchdog = Watchdog(float(self._timeout))

        self.scenario_duration_system = 0.0
        self.scenario_duration_game = 0.0
        self.start_system_time = None
        self.end_system_time = None

    def _reset(self):
        """
        Reset all parameters
        """
        self._running = False
        self._timestamp_last_run = 0.0
        self.scenario_duration_system = 0.0
        self.scenario_duration_game = 0.0
        self.start_system_time = None
        self.end_system_time = None
        GameTime.restart()

    def cleanup(self):
        """
        This function triggers a proper termination of a scenario
        """
        if self.scenario is not None:
            self.scenario.terminate()

        if self._agent is not None:
            self._agent.cleanup()
            self._agent = None

        CarlaDataProvider.cleanup()

    def load_scenario(self, scenario, agent=None):
        """
        Load a new scenario
        """
        self._reset()
        self._agent = AgentWrapper(agent) if agent else None
        if self._agent is not None:
            self._sync_mode = True
        self.scenario_class = scenario
        self.scenario = scenario.scenario
        self.scenario_tree = self.scenario.scenario_tree
        self.ego_vehicles = scenario.ego_vehicles
        self.other_actors = scenario.other_actors

        # To print the scenario tree uncomment the next line
        # py_trees.display.render_dot_tree(self.scenario_tree)

        if self._agent is not None:
            self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode)

    def run_scenario(self):
        """
        Trigger the start of the scenario and wait for it to finish/fail
        """
        print("ScenarioManager: Running scenario {}".format(self.scenario_tree.name))
        self.start_system_time = time.time()
        start_game_time = GameTime.get_time()

        self._watchdog.start()
        self._running = True

        while self._running:
            timestamp = None
            world = CarlaDataProvider.get_world()
            if world:
                snapshot = world.get_snapshot()
                if snapshot:
                    timestamp = snapshot.timestamp
            if timestamp:
                self._tick_scenario(timestamp)

        self._watchdog.stop()

        self.cleanup()

        self.end_system_time = time.time()
        end_game_time = GameTime.get_time()

        self.scenario_duration_system = self.end_system_time - \
            self.start_system_time
        self.scenario_duration_game = end_game_time - start_game_time

        if self.scenario_tree.status == py_trees.common.Status.FAILURE:
            print("ScenarioManager: Terminated due to failure")

    def _tick_scenario(self, timestamp):
        """
        Run next tick of scenario and the agent.
        If running synchronously, it also handles the ticking of the world.
        """

        if self._timestamp_last_run < timestamp.elapsed_seconds and self._running:
            self._timestamp_last_run = timestamp.elapsed_seconds

            self._watchdog.update()

            if self._debug_mode:
                print("\n--------- Tick ---------\n")

            # Update game time and actor information
            GameTime.on_carla_tick(timestamp)
            CarlaDataProvider.on_carla_tick()

            if self._agent is not None:
                ego_action = self._agent()

            # Tick scenario
            self.scenario_tree.tick_once()

            if self._debug_mode:
                print("\n")
                py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True)
                sys.stdout.flush()

            if self.scenario_tree.status != py_trees.common.Status.RUNNING:
                self._running = False

            if self._agent is not None:
                self.ego_vehicles[0].apply_control(ego_action)

        if self._sync_mode and self._running and self._watchdog.get_status():
            CarlaDataProvider.get_world().tick()

    def get_running_status(self):
        """
        returns:
           bool: False if watchdog exception occurred, True otherwise
        """
        return self._watchdog.get_status()

    def stop_scenario(self):
        """
        This function is used by the overall signal handler to terminate the scenario execution
        """
        self._running = False

    def analyze_scenario(self, stdout, filename, junit):
        """
        This function is intended to be called from outside and provide
        the final statistics about the scenario (human-readable, in form of
        a junit report, etc.)
        """

        failure = False
        timeout = False
        result = "SUCCESS"

        if self.scenario.test_criteria is None:
            print("Nothing to analyze, this scenario has no criteria")
            return True

        for criterion in self.scenario.get_criteria():
            if (not criterion.optional and
                    criterion.test_status != "SUCCESS" and
                    criterion.test_status != "ACCEPTABLE"):
                failure = True
                result = "FAILURE"
            elif criterion.test_status == "ACCEPTABLE":
                result = "ACCEPTABLE"

        if self.scenario.timeout_node.timeout and not failure:
            timeout = True
            result = "TIMEOUT"

        output = ResultOutputProvider(self, result, stdout, filename, junit)
        output.write()

        return failure or timeout
To use", "= 0.0 self._timeout = timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system =", "snapshot: timestamp = snapshot.timestamp if timestamp: self._tick_scenario(timestamp) self._watchdog.stop() self.cleanup() self.end_system_time", "\"\"\" self._running = False def analyze_scenario(self, stdout, filename, junit): \"\"\"", "result = \"TIMEOUT\" output = ResultOutputProvider(self, result, stdout, filename, junit)", "print the scenario tree uncomment the next line # py_trees.display.render_dot_tree(self.scenario_tree)", "= CarlaDataProvider.get_world() if world: snapshot = world.get_snapshot() if snapshot: timestamp", "timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0", "False timeout = False result = \"SUCCESS\" if self.scenario.test_criteria is", "self.end_system_time - \\ self.start_system_time self.scenario_duration_game = end_game_time - start_game_time if", "filename, junit): \"\"\" This function is intended to be called", "timestamp): \"\"\" Run next tick of scenario and the agent.", "= time.time() start_game_time = GameTime.get_time() self._watchdog.start() self._running = True while", "self._agent() # Tick scenario self.scenario_tree.tick_once() if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True)", "self._timestamp_last_run = 0.0 self._timeout = timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system", "None CarlaDataProvider.cleanup() def load_scenario(self, scenario, agent=None): \"\"\" Load a new", "if (not criterion.optional and criterion.test_status != \"SUCCESS\" and criterion.test_status !=", "True otherwise \"\"\" return self._watchdog.get_status() def stop_scenario(self): \"\"\" This function", "from srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider import CarlaDataProvider from srunner.scenariomanager.result_writer", "= scenario.scenario self.scenario_tree = self.scenario.scenario_tree self.ego_vehicles = scenario.ego_vehicles self.other_actors =", "license. # For a copy, see <https://opensource.org/licenses/MIT>. \"\"\" This module", "GameTime.get_time() self._watchdog.start() self._running = True while self._running: timestamp = None", "scenario manager class. This class holds all functionality required to", "used by the overall signal handler to terminate the scenario", "If running synchornously, it also handles the ticking of the", "end of the scenario execution 4. Trigger a result evaluation", "an object via manager = ScenarioManager() 2. Load a scenario", "about the scenario (human-readable, in form of a junit report,", "= \"ACCEPTABLE\" if self.scenario.timeout_node.timeout and not failure: timeout = True", "self._watchdog.update() if self._debug_mode: print(\"\\n--------- Tick ---------\\n\") # Update game time", "self._agent is not None: self._agent.cleanup() self._agent = None CarlaDataProvider.cleanup() def", "False def analyze_scenario(self, stdout, filename, junit): \"\"\" This function is", "to be called from outside and provide the final statistics", "= False timeout = False result = \"SUCCESS\" if self.scenario.test_criteria", "For a copy, see <https://opensource.org/licenses/MIT>. 
\"\"\" This module provides the", "execution \"\"\" self._running = False def analyze_scenario(self, stdout, filename, junit):", "self._running: timestamp = None world = CarlaDataProvider.get_world() if world: snapshot", "a proper termination of a scenario \"\"\" if self.scenario is", "\"\"\" This function triggers a proper termination of a scenario", "not None: self.ego_vehicles[0].apply_control(ego_action) if self._sync_mode and self._running and self._watchdog.get_status(): CarlaDataProvider.get_world().tick()", "the terms of the MIT license. # For a copy,", "if snapshot: timestamp = snapshot.timestamp if timestamp: self._tick_scenario(timestamp) self._watchdog.stop() self.cleanup()", "python # Copyright (c) 2018-2020 Intel Corporation # # This", "def _tick_scenario(self, timestamp): \"\"\" Run next tick of scenario and", "if self.scenario.test_criteria is None: print(\"Nothing to analyze, this scenario has", "srunner.scenariomanager.watchdog import Watchdog class ScenarioManager(object): \"\"\" Basic scenario manager class.", "must not be modified and is for reference only! \"\"\"", "all functionality required to start, and analyze a scenario. The", "import py_trees from srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider import CarlaDataProvider", "self._watchdog.stop() self.cleanup() self.end_system_time = time.time() end_game_time = GameTime.get_time() self.scenario_duration_system =", "scenario.scenario self.scenario_tree = self.scenario.scenario_tree self.ego_vehicles = scenario.ego_vehicles self.other_actors = scenario.other_actors", "not None: self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode) def run_scenario(self): \"\"\" Trigger the start", "True self.scenario_class = scenario self.scenario = scenario.scenario self.scenario_tree = self.scenario.scenario_tree", "CarlaDataProvider from srunner.scenariomanager.result_writer import ResultOutputProvider from srunner.scenariomanager.timer import GameTime from", "= world.get_snapshot() if snapshot: timestamp = snapshot.timestamp if timestamp: self._tick_scenario(timestamp)", "= \"TIMEOUT\" output = ResultOutputProvider(self, result, stdout, filename, junit) output.write()", "elif criterion.test_status == \"ACCEPTABLE\": result = \"ACCEPTABLE\" if self.scenario.timeout_node.timeout and", "and end of the scenario execution 4. Trigger a result", "world. \"\"\" if self._timestamp_last_run < timestamp.elapsed_seconds and self._running: self._timestamp_last_run =", "None: self.ego_vehicles[0].apply_control(ego_action) if self._sync_mode and self._running and self._watchdog.get_status(): CarlaDataProvider.get_world().tick() def", "srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider import CarlaDataProvider from srunner.scenariomanager.result_writer import", "MIT license. # For a copy, see <https://opensource.org/licenses/MIT>. \"\"\" This", "function is used by the overall signal handler to terminate", "\"\"\" This function is intended to be called from outside", "= True self.scenario_class = scenario self.scenario = scenario.scenario self.scenario_tree =", "execution 4. Trigger a result evaluation with manager.analyze_scenario() 5. 
If", "self.scenario.terminate() if self._agent is not None: self._agent.cleanup() self._agent = None", "if timestamp: self._tick_scenario(timestamp) self._watchdog.stop() self.cleanup() self.end_system_time = time.time() end_game_time =", "self.scenario.get_criteria(): if (not criterion.optional and criterion.test_status != \"SUCCESS\" and criterion.test_status", "control start and end of the scenario execution 4. Trigger", "def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0): \"\"\" Setups up the parameters,", "run_scenario(self): \"\"\" Trigger the start of the scenario and wait", "(not criterion.optional and criterion.test_status != \"SUCCESS\" and criterion.test_status != \"ACCEPTABLE\"):", "CarlaDataProvider.cleanup() def load_scenario(self, scenario, agent=None): \"\"\" Load a new scenario", "from srunner.scenariomanager.watchdog import Watchdog class ScenarioManager(object): \"\"\" Basic scenario manager", "= ResultOutputProvider(self, result, stdout, filename, junit) output.write() return failure or", "Watchdog class ScenarioManager(object): \"\"\" Basic scenario manager class. This class", "of the scenario execution 4. Trigger a result evaluation with", "It must not be modified and is for reference only!", "if self._agent is not None: self.ego_vehicles[0].apply_control(ego_action) if self._sync_mode and self._running", "with manager.stop_scenario() \"\"\" def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0): \"\"\" Setups", "show_status=True) sys.stdout.flush() if self.scenario_tree.status != py_trees.common.Status.RUNNING: self._running = False if", "has no criteria\") return True for criterion in self.scenario.get_criteria(): if", "with manager.analyze_scenario() 5. If needed, cleanup with manager.stop_scenario() \"\"\" def", "self._timestamp_last_run = 0.0 self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0 self.start_system_time", "To use the ScenarioManager: 1. Create an object via manager", "self._debug_mode) def run_scenario(self): \"\"\" Trigger the start of the scenario", "= None self.ego_vehicles = None self.other_actors = None self._debug_mode =", "self._agent is not None: self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode) def run_scenario(self): \"\"\" Trigger", "\\ self.start_system_time self.scenario_duration_game = end_game_time - start_game_time if self.scenario_tree.status ==", "this scenario has no criteria\") return True for criterion in", "Trigger the start of the scenario and wait for it", "self.scenario_tree.status != py_trees.common.Status.RUNNING: self._running = False if self._agent is not", "def _reset(self): \"\"\" Reset all parameters \"\"\" self._running = False", "# py_trees.display.render_dot_tree(self.scenario_tree) if self._agent is not None: self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode) def", "of scenario and the agent. If running synchornously, it also", "< timestamp.elapsed_seconds and self._running: self._timestamp_last_run = timestamp.elapsed_seconds self._watchdog.update() if self._debug_mode:", "This class holds all functionality required to start, and analyze", "junit report, etc.) 
\"\"\" failure = False timeout = False", "not failure: timeout = True result = \"TIMEOUT\" output =", "is used by the overall signal handler to terminate the", "This function is intended to be called from outside and", "self.scenario_duration_game = end_game_time - start_game_time if self.scenario_tree.status == py_trees.common.Status.FAILURE: print(\"ScenarioManager:", "scenario via manager.load_scenario() 3. Trigger the execution of the scenario", "is not None: self._agent.setup_sensors(self.ego_vehicles[0], self._debug_mode) def run_scenario(self): \"\"\" Trigger the", "if world: snapshot = world.get_snapshot() if snapshot: timestamp = snapshot.timestamp", "return True for criterion in self.scenario.get_criteria(): if (not criterion.optional and", "scenario has no criteria\") return True for criterion in self.scenario.get_criteria():", "def stop_scenario(self): \"\"\" This function is used by the overall", "which will be filled at load_scenario() \"\"\" self.scenario = None", "start and end of the scenario execution 4. Trigger a", "\"\"\" from __future__ import print_function import sys import time import", "class ScenarioManager(object): \"\"\" Basic scenario manager class. This class holds", "start, and analyze a scenario. The user must not modify", "time.time() start_game_time = GameTime.get_time() self._watchdog.start() self._running = True while self._running:", "modify this class. To use the ScenarioManager: 1. Create an", "None self.scenario_class = None self.ego_vehicles = None self.other_actors = None", "\"\"\" if self.scenario is not None: self.scenario.terminate() if self._agent is", "object via manager = ScenarioManager() 2. Load a scenario via", "the overall signal handler to terminate the scenario execution \"\"\"", "<reponame>cgeller/WorldOnRails<gh_stars>100-1000 #!/usr/bin/env python # Copyright (c) 2018-2020 Intel Corporation #", "will be filled at load_scenario() \"\"\" self.scenario = None self.scenario_tree", "False if watchdog exception occured, True otherwise \"\"\" return self._watchdog.get_status()", "False self._timestamp_last_run = 0.0 self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0", "Tick scenario self.scenario_tree.tick_once() if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush() if", "the scenario manager.run_scenario() This function is designed to explicitly control", "self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0 self.start_system_time", "None self.scenario_tree = None self.scenario_class = None self.ego_vehicles = None", "2018-2020 Intel Corporation # # This work is licensed under", "implementation. It must not be modified and is for reference", "= True result = \"TIMEOUT\" output = ResultOutputProvider(self, result, stdout,", "CarlaDataProvider.on_carla_tick() if self._agent is not None: ego_action = self._agent() #", "self._tick_scenario(timestamp) self._watchdog.stop() self.cleanup() self.end_system_time = time.time() end_game_time = GameTime.get_time() self.scenario_duration_system", "self.end_system_time = None GameTime.restart() def cleanup(self): \"\"\" This function triggers", "# This work is licensed under the terms of the", "This function is used by the overall signal handler to", "= 0.0 self.start_system_time = None self.end_system_time = None GameTime.restart() def", "class. 
This class holds all functionality required to start, and", "scenario execution \"\"\" self._running = False def analyze_scenario(self, stdout, filename,", "running synchornously, it also handles the ticking of the world.", "next line # py_trees.display.render_dot_tree(self.scenario_tree) if self._agent is not None: self._agent.setup_sensors(self.ego_vehicles[0],", "and criterion.test_status != \"ACCEPTABLE\"): failure = True result = \"FAILURE\"", "final statistics about the scenario (human-readable, in form of a", "self.ego_vehicles = scenario.ego_vehicles self.other_actors = scenario.other_actors # To print the", "occured, True otherwise \"\"\" return self._watchdog.get_status() def stop_scenario(self): \"\"\" This", "import time import py_trees from srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider", "a copy, see <https://opensource.org/licenses/MIT>. \"\"\" This module provides the ScenarioManager", "= None self._debug_mode = debug_mode self._agent = None self._sync_mode =", "import print_function import sys import time import py_trees from srunner.autoagents.agent_wrapper", "print_function import sys import time import py_trees from srunner.autoagents.agent_wrapper import", "\"\"\" This module provides the ScenarioManager implementation. It must not", "sync_mode=False, timeout=2.0): \"\"\" Setups up the parameters, which will be", "if self.scenario is not None: self.scenario.terminate() if self._agent is not", "= False self._timestamp_last_run = 0.0 self.scenario_duration_system = 0.0 self.scenario_duration_game =", "self._agent = None self._sync_mode = sync_mode self._running = False self._timestamp_last_run", "py_trees.common.Status.FAILURE: print(\"ScenarioManager: Terminated due to failure\") def _tick_scenario(self, timestamp): \"\"\"", "the agent. If running synchornously, it also handles the ticking", "\"TIMEOUT\" output = ResultOutputProvider(self, result, stdout, filename, junit) output.write() return", "at load_scenario() \"\"\" self.scenario = None self.scenario_tree = None self.scenario_class", "\"SUCCESS\" and criterion.test_status != \"ACCEPTABLE\"): failure = True result =", "if self._agent is not None: ego_action = self._agent() # Tick", "= self.scenario.scenario_tree self.ego_vehicles = scenario.ego_vehicles self.other_actors = scenario.other_actors # To", "This work is licensed under the terms of the MIT", "under the terms of the MIT license. 
# For a", "sys import time import py_trees from srunner.autoagents.agent_wrapper import AgentWrapper from", "from srunner.scenariomanager.carla_data_provider import CarlaDataProvider from srunner.scenariomanager.result_writer import ResultOutputProvider from srunner.scenariomanager.timer", "outside and provide the final statistics about the scenario (human-readable,", "GameTime from srunner.scenariomanager.watchdog import Watchdog class ScenarioManager(object): \"\"\" Basic scenario", "Update game time and actor information GameTime.on_carla_tick(timestamp) CarlaDataProvider.on_carla_tick() if self._agent", "None self._sync_mode = sync_mode self._running = False self._timestamp_last_run = 0.0", "start_game_time if self.scenario_tree.status == py_trees.common.Status.FAILURE: print(\"ScenarioManager: Terminated due to failure\")", "== py_trees.common.Status.FAILURE: print(\"ScenarioManager: Terminated due to failure\") def _tick_scenario(self, timestamp):", "is designed to explicitly control start and end of the", "timeout = False result = \"SUCCESS\" if self.scenario.test_criteria is None:", "and is for reference only! \"\"\" from __future__ import print_function", "of the scenario manager.run_scenario() This function is designed to explicitly", "= timestamp.elapsed_seconds self._watchdog.update() if self._debug_mode: print(\"\\n--------- Tick ---------\\n\") # Update", "Create an object via manager = ScenarioManager() 2. Load a", "= GameTime.get_time() self.scenario_duration_system = self.end_system_time - \\ self.start_system_time self.scenario_duration_game =", "reference only! \"\"\" from __future__ import print_function import sys import", "not be modified and is for reference only! \"\"\" from", "\"\"\" def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0): \"\"\" Setups up the", "!= py_trees.common.Status.RUNNING: self._running = False if self._agent is not None:", "the execution of the scenario manager.run_scenario() This function is designed", "world = CarlaDataProvider.get_world() if world: snapshot = world.get_snapshot() if snapshot:", "the scenario (human-readable, in form of a junit report, etc.)", "self._watchdog.start() self._running = True while self._running: timestamp = None world", "scenario self.scenario_tree.tick_once() if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush() if self.scenario_tree.status", "self.scenario_tree = None self.scenario_class = None self.ego_vehicles = None self.other_actors", "print(\"ScenarioManager: Terminated due to failure\") def _tick_scenario(self, timestamp): \"\"\" Run", "5. 
If needed, cleanup with manager.stop_scenario() \"\"\" def __init__(self, debug_mode=False,", "self._watchdog.get_status(): CarlaDataProvider.get_world().tick() def get_running_status(self): \"\"\" returns: bool: False if watchdog", "not None: ego_action = self._agent() # Tick scenario self.scenario_tree.tick_once() if", "if self.scenario_tree.status == py_trees.common.Status.FAILURE: print(\"ScenarioManager: Terminated due to failure\") def", "Copyright (c) 2018-2020 Intel Corporation # # This work is", "ResultOutputProvider from srunner.scenariomanager.timer import GameTime from srunner.scenariomanager.watchdog import Watchdog class", "filled at load_scenario() \"\"\" self.scenario = None self.scenario_tree = None", "and self._watchdog.get_status(): CarlaDataProvider.get_world().tick() def get_running_status(self): \"\"\" returns: bool: False if", "self._sync_mode = True self.scenario_class = scenario self.scenario = scenario.scenario self.scenario_tree", "timeout = True result = \"TIMEOUT\" output = ResultOutputProvider(self, result,", "all parameters \"\"\" self._running = False self._timestamp_last_run = 0.0 self.scenario_duration_system", "see <https://opensource.org/licenses/MIT>. \"\"\" This module provides the ScenarioManager implementation. It", "and self._running: self._timestamp_last_run = timestamp.elapsed_seconds self._watchdog.update() if self._debug_mode: print(\"\\n--------- Tick", "start_game_time = GameTime.get_time() self._watchdog.start() self._running = True while self._running: timestamp", "and self._running and self._watchdog.get_status(): CarlaDataProvider.get_world().tick() def get_running_status(self): \"\"\" returns: bool:", "watchdog exception occured, True otherwise \"\"\" return self._watchdog.get_status() def stop_scenario(self):", "result = \"FAILURE\" elif criterion.test_status == \"ACCEPTABLE\": result = \"ACCEPTABLE\"", "# Tick scenario self.scenario_tree.tick_once() if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush()", "ResultOutputProvider(self, result, stdout, filename, junit) output.write() return failure or timeout", "<https://opensource.org/licenses/MIT>. \"\"\" This module provides the ScenarioManager implementation. It must", "parameters, which will be filled at load_scenario() \"\"\" self.scenario =", "= False self._timestamp_last_run = 0.0 self._timeout = timeout self._watchdog =", "self._debug_mode = debug_mode self._agent = None self._sync_mode = sync_mode self._running", "\"\"\" return self._watchdog.get_status() def stop_scenario(self): \"\"\" This function is used", "called from outside and provide the final statistics about the", "\"\"\" self._reset() self._agent = AgentWrapper(agent) if agent else None if", "function triggers a proper termination of a scenario \"\"\" if", "py_trees from srunner.autoagents.agent_wrapper import AgentWrapper from srunner.scenariomanager.carla_data_provider import CarlaDataProvider from", "self.other_actors = scenario.other_actors # To print the scenario tree uncomment", "the ScenarioManager implementation. It must not be modified and is", "and the agent. 
If running synchornously, it also handles the", "srunner.scenariomanager.carla_data_provider import CarlaDataProvider from srunner.scenariomanager.result_writer import ResultOutputProvider from srunner.scenariomanager.timer import", "terminate the scenario execution \"\"\" self._running = False def analyze_scenario(self,", "self.scenario_duration_game = 0.0 self.start_system_time = None self.end_system_time = None def", "a new scenario \"\"\" self._reset() self._agent = AgentWrapper(agent) if agent", "self.scenario_tree.tick_once() if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush() if self.scenario_tree.status !=", "and criterion.test_status != \"SUCCESS\" and criterion.test_status != \"ACCEPTABLE\"): failure =", "a scenario via manager.load_scenario() 3. Trigger the execution of the", "Basic scenario manager class. This class holds all functionality required", "0.0 self.start_system_time = None self.end_system_time = None def _reset(self): \"\"\"", "it to finish/fail \"\"\" print(\"ScenarioManager: Running scenario {}\".format(self.scenario_tree.name)) self.start_system_time =", "timestamp = None world = CarlaDataProvider.get_world() if world: snapshot =", "\"FAILURE\" elif criterion.test_status == \"ACCEPTABLE\": result = \"ACCEPTABLE\" if self.scenario.timeout_node.timeout", "work is licensed under the terms of the MIT license.", "__future__ import print_function import sys import time import py_trees from", "if self._agent is not None: self._sync_mode = True self.scenario_class =", "# # This work is licensed under the terms of", "world: snapshot = world.get_snapshot() if snapshot: timestamp = snapshot.timestamp if", "a result evaluation with manager.analyze_scenario() 5. If needed, cleanup with", "a scenario \"\"\" if self.scenario is not None: self.scenario.terminate() if", "print(\"\\n--------- Tick ---------\\n\") # Update game time and actor information", "scenario (human-readable, in form of a junit report, etc.) \"\"\"", "for reference only! \"\"\" from __future__ import print_function import sys", "= Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0 self.scenario_duration_game = 0.0 self.start_system_time =", "def get_running_status(self): \"\"\" returns: bool: False if watchdog exception occured,", "\"\"\" Setups up the parameters, which will be filled at", "finish/fail \"\"\" print(\"ScenarioManager: Running scenario {}\".format(self.scenario_tree.name)) self.start_system_time = time.time() start_game_time", "exception occured, True otherwise \"\"\" return self._watchdog.get_status() def stop_scenario(self): \"\"\"", "self._agent.cleanup() self._agent = None CarlaDataProvider.cleanup() def load_scenario(self, scenario, agent=None): \"\"\"", "self.scenario_class = scenario self.scenario = scenario.scenario self.scenario_tree = self.scenario.scenario_tree self.ego_vehicles", "needed, cleanup with manager.stop_scenario() \"\"\" def __init__(self, debug_mode=False, sync_mode=False, timeout=2.0):", "= AgentWrapper(agent) if agent else None if self._agent is not", "= None self._sync_mode = sync_mode self._running = False self._timestamp_last_run =", "= None def _reset(self): \"\"\" Reset all parameters \"\"\" self._running", "the scenario execution 4. 
Trigger a result evaluation with manager.analyze_scenario()", "= scenario.ego_vehicles self.other_actors = scenario.other_actors # To print the scenario", "True while self._running: timestamp = None world = CarlaDataProvider.get_world() if", "self.start_system_time = None self.end_system_time = None def _reset(self): \"\"\" Reset", "\"\"\" print(\"ScenarioManager: Running scenario {}\".format(self.scenario_tree.name)) self.start_system_time = time.time() start_game_time =", "also handles the ticking of the world. \"\"\" if self._timestamp_last_run", "= timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0 self.scenario_duration_game =", "\"\"\" self._running = False self._timestamp_last_run = 0.0 self.scenario_duration_system = 0.0", "self.scenario_class = None self.ego_vehicles = None self.other_actors = None self._debug_mode", "0.0 self._timeout = timeout self._watchdog = Watchdog(float(self._timeout)) self.scenario_duration_system = 0.0", "import Watchdog class ScenarioManager(object): \"\"\" Basic scenario manager class. This", "3. Trigger the execution of the scenario manager.run_scenario() This function", "self._agent is not None: self._sync_mode = True self.scenario_class = scenario", "self.start_system_time = time.time() start_game_time = GameTime.get_time() self._watchdog.start() self._running = True", "world.get_snapshot() if snapshot: timestamp = snapshot.timestamp if timestamp: self._tick_scenario(timestamp) self._watchdog.stop()", "= None self.end_system_time = None GameTime.restart() def cleanup(self): \"\"\" This", "a junit report, etc.) \"\"\" failure = False timeout =", "failure = False timeout = False result = \"SUCCESS\" if", "and actor information GameTime.on_carla_tick(timestamp) CarlaDataProvider.on_carla_tick() if self._agent is not None:", "etc.) \"\"\" failure = False timeout = False result =", "via manager = ScenarioManager() 2. Load a scenario via manager.load_scenario()", "from outside and provide the final statistics about the scenario", "from srunner.scenariomanager.result_writer import ResultOutputProvider from srunner.scenariomanager.timer import GameTime from srunner.scenariomanager.watchdog", "of the scenario and wait for it to finish/fail \"\"\"", "None: self.scenario.terminate() if self._agent is not None: self._agent.cleanup() self._agent =", "if self._debug_mode: print(\"\\n\") py_trees.display.print_ascii_tree(self.scenario_tree, show_status=True) sys.stdout.flush() if self.scenario_tree.status != py_trees.common.Status.RUNNING:", "4. Trigger a result evaluation with manager.analyze_scenario() 5. 
If needed,", "to terminate the scenario execution \"\"\" self._running = False def", "\"\"\" Trigger the start of the scenario and wait for", "failure\") def _tick_scenario(self, timestamp): \"\"\" Run next tick of scenario", "termination of a scenario \"\"\" if self.scenario is not None:", "be called from outside and provide the final statistics about", "in self.scenario.get_criteria(): if (not criterion.optional and criterion.test_status != \"SUCCESS\" and", "no criteria\") return True for criterion in self.scenario.get_criteria(): if (not", "= end_game_time - start_game_time if self.scenario_tree.status == py_trees.common.Status.FAILURE: print(\"ScenarioManager: Terminated", "not None: self.scenario.terminate() if self._agent is not None: self._agent.cleanup() self._agent", "self._agent is not None: ego_action = self._agent() # Tick scenario", "= False def analyze_scenario(self, stdout, filename, junit): \"\"\" This function" ]
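
# ---------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original module): the five-step
# workflow from the ScenarioManager docstring. `scenario` is assumed to be a
# fully constructed scenario object as produced elsewhere by ScenarioRunner,
# and the argument values passed to analyze_scenario() below (stdout only,
# no file/junit output) are assumptions for the example, as is the function
# name itself.


def _example_scenario_run(scenario):
    # 1. Create the manager (here: synchronous mode, 10 s watchdog timeout).
    manager = ScenarioManager(debug_mode=False, sync_mode=True, timeout=10.0)
    # 2. Load the scenario (no autonomous agent in this sketch).
    manager.load_scenario(scenario, agent=None)
    # 3. Run it; this blocks until the behaviour tree leaves RUNNING.
    manager.run_scenario()
    # 4. Evaluate the criteria; analyze_scenario() returns True on failure
    #    or timeout, so a passing run is the negation of its result.
    passed = not manager.analyze_scenario(stdout=True, filename=None,
                                          junit=None)
    # 5. Normally invoked by a signal handler; called here for completeness.
    manager.stop_scenario()
    return passed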
[ "-> Optional[sd.ObjectCommandContext[so.Object]]: \"\"\"Get the context of the command for the", "import annotations from typing import * import hashlib from edb", "self._validate(schema, context) return schema class RebaseReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.RebaseInheritingObject[ReferencedInheritingObjectT], ): implicit", ") verb = 'are' if len(non_renamed_bases) > 1 else 'is'", "' f'{\", \".join(a.get_shortname(schema) for a in ancestry)}', context=self.source_context, ) elif", "= True _referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None def __new__(mcls,", "s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, ) -> s_schema.Schema: get_cmd =", "2.0 (the \"License\"); # you may not use this file", "return schema def get_implicit_bases( self, schema: s_schema.Schema, context: sd.CommandContext, bases:", "# Child is either defined locally or is inherited #", "pnn = sn.get_specialized_name(base_name, referrer_name, *quals) return sn.Name(name=pnn, module=referrer_name.module) @classmethod def", "scls: ReferencedInheritingObject ) -> s_schema.Schema: rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls))", "= sd.ObjectCommandMeta.get_command_class_or_die( sd.DeleteObject, type(self)) cmd = cmdcls(classname=self.get_name(schema)) context = sd.CommandContext(", "name if self.get_name(schema) == derived_name: raise errors.SchemaError( f'cannot derive {self!r}({derived_name})", "the creation of a new ref to descendants of #", "s_schema from . import name as sn from . import", "with d_alter_cmd.new_context(schema, context, descendant): cb(d_alter_cmd, refname) r_alter_cmd.add(d_alter_cmd) schema = r_alter_cmd.apply(schema,", "context) parent_ctx = cls.get_referrer_context(context) if parent_ctx is not None: assert", "child_coll.get(schema, child_refname, None) if existing is not None: alter =", "schema def _propagate_ref_rename(self, schema: s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject )", ") delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context, self) parent_cmd.add(cmd) with", "if len(non_renamed_bases) > 1 else 'is' vn = scls.get_verbosename(orig_schema) raise", "return implicit_bases class AlterReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.AlterInheritingObject[ReferencedInheritingObjectT], ): @classmethod def _cmd_tree_from_ast(", "classname=name, added_bases=added_bases, removed_bases=removed_bases, ) ref_alter_cmd = get_cmd(sd.AlterObject, mcls) cmd =", "context) self.add(r_alter_cmd) context.current().enable_recursion = rec return schema class CreateReferencedInheritingObject( CreateReferencedObject[ReferencedInheritingObjectT],", "RenameReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], sd.RenameObject, ): def _rename_begin(self, schema: s_schema.Schema, context: sd.CommandContext", "@classmethod def _classname_quals_from_name( cls, name: sn.SchemaName, ) -> Tuple[str, ...]:", "cmd = cmdcls(classname=derived_name) for k, v in derived_attrs.items(): cmd.set_attribute_value(k, v)", "cmd @classmethod def as_inherited_ref_ast(cls, schema: s_schema.Schema, context: sd.CommandContext, name: str,", "authors. 
# # Licensed under the Apache License, Version 2.0", "refdict = referrer_class.get_refdict_for_class(objcls) if refdict.requires_explicit_overloaded and implicit_bases: assert astnode is", "of the delta tree. Instead, generate a command group #", "= self._propagate_ref_deletion( schema, context, refdict, child, existing) alter.add(cmd) self.add(alter) return", "referrer_ctx = self.get_referrer_context(context) if referrer_ctx is None: return schema else:", "sd.AlterObject, referrer_class) alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) for descendant in", "self.get_subject(schema) if subject is not None: pn = subject.get_verbosename(schema, with_parent=True)", "self.add(alter) return schema def _propagate_ref_deletion( self, schema: s_schema.Schema, context: sd.CommandContext,", "None: cmdcls: Type[sd.Command] = \\ sd.ObjectCommandMeta.get_command_class_or_die(sd.AlterObject, type(self)) else: cmdcls =", "def get_referrer_context_class( cls, ) -> Type[sd.ObjectCommandContext[so.Object]]: if cls._referrer_context_class is None:", "self._validate(schema, context) return schema def _create_ref( self, schema: s_schema.Schema, context:", "= ref_field_type.get_key_for_name( schema, parent_fq_refname) astnode = ref_create_cmd.as_inherited_ref_ast( schema, context, refname,", "not scls.generic(schema): implicit_bases = scls.get_implicit_bases(schema) non_renamed_bases = set(implicit_bases) - context.renamed_objs", "cmd.set_attribute_value('is_local', True) if getattr(astnode, 'is_abstract', None): cmd.set_attribute_value('is_abstract', True) return cmd", "!= default_base and isinstance(b, sn.SchemaName) and sn.shortname_from_fullname(b) != b )", "context=self.source_context, details=f'{vn} is inherited from:\\n- {pnames}' ) alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die(", "cmd.set_attribute_value(k, v) if existing is not None: new_bases = derived_attrs['bases']", "None, transient: bool = False, name: Optional[str] = None, **kwargs:", "self.get_referrer_context(context) if referrer_ctx is None: return schema else: referrer =", "schema: s_schema.Schema) -> Optional[so.Object]: # NB: classes that inherit ReferencedObject", "referrer is None: assert isinstance(scls, ReferencedObject) referrer = scls.get_referrer(schema) obj", "for child in referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll = child.get_field_value(schema,", "None): cmd.set_attribute_value('is_abstract', True) return cmd def _get_ast_node(self, schema: s_schema.Schema, context:", "implicit_bases=implicit_bases, ) self.added_bases = added_bases self.removed_bases = removed_bases return super().apply(schema,", "True if transient: context.current().transient_derivation = True if preserve_path_id: context.current().preserve_path_id =", "(isinstance(referrer, so.InheritingObject) and not context.canonical): if (not context.in_deletion(offset=1) and not", "op in self.get_subcommands(type=sd.ObjectCommand): schema = op.apply(schema, context) return schema def", "parent: ReferencedObject, name: str ) -> qlast.ObjectRef: # reduce name", "so.SchemaField( bool, default=False, compcoef=None, introspectable=False, inheritable=False, ephemeral=True, ) def get_implicit_bases(", "self.get_schema_metaclass() referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) if context.declarative and", "struct from edb.edgeql import ast as qlast from . 
import", ") -> Optional[sd.ObjectCommandContext[so.Object]]: \"\"\"Get the context of the command for", "= None, **kwargs: Any ) -> ReferencedObjectCommandMeta: cls = super().__new__(mcls,", "License for the specific language governing permissions and # limitations", "derived object must # also be marked as derived, to", "= rebase_cmdcls( classname=derived_name, added_bases=added_bases, removed_bases=removed_bases, ) cmd.add(rebase_cmd) context = sd.CommandContext(", "Type[qlast.DDLOperation] = getattr(self, 'referenced_astnode', None) if subject_ctx is not None", "scls, cb=_ref_rename) class DeleteReferencedInheritingObject( DeleteReferencedObjectCommand[ReferencedInheritingObjectT], inheriting.DeleteInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], ): def _delete_ref(", "context) scls = self.scls if not context.canonical and not scls.generic(schema):", "not self.get_attribute_value('declared_overloaded')): ancestry = [] for obj in implicit_bases: bref", "out explicit bases implicit_bases = [ b for b in", "= Alter(classname=self.classname) return alter._get_ast_node(schema, context) else: return super()._get_ast_node(schema, context) @classmethod", "is not None: if isinstance(obj, ReferencedObject): obj = obj.get_referrer(schema) object_stack.append(obj)", "implicit_bases.append(parent_item) return implicit_bases def get_ref_implicit_base_delta( self, schema: s_schema.Schema, context: sd.CommandContext,", "= sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, type(obj)) alter_cmd = alter_cmd_cls(classname=obj.get_name(schema)) cmd.add(alter_cmd) cmd =", "explicit definition and is not #: purely inherited. is_local =", "if bases: bases = so.ObjectList.create( schema, implicit_bases + [ b", "obj is not None: if isinstance(obj, ReferencedObject): obj = obj.get_referrer(schema)", "was_local and now_local: self._validate(schema, context) return schema class RebaseReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],", "ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.RebaseInheritingObject[ReferencedInheritingObjectT], ): implicit = struct.Field(bool, default=False) def apply( self,", ") -> s_schema.Schema: schema = super()._create_ref(schema, context, referrer) if (not", "so.DerivableInheritingObject, ReferencedObject, ): # Indicates that the object has been", "this would be the context of the `create/alter/etc type` command.", "r_alter_cmd.apply(schema, context) self.add(r_alter_cmd) context.current().enable_recursion = rec return schema class CreateReferencedInheritingObject(", "derived, to be consistent # with derive_subtype(). ref_create.set_attribute_value('is_derived', True) ref_alter", "= [ b.get_field_value(schema, refdict.backref_attr) for b in implicit_bases ] pnames", "scls = self.scls was_local = scls.get_is_local(schema) schema = super()._alter_begin(schema, context)", "source file is part of the EdgeDB open source project.", "ReferencedObject): obj = obj.get_referrer(schema) object_stack.append(obj) else: obj = None cmd:", "class CreateReferencedObject( ReferencedObjectCommand[ReferencedT], sd.CreateObject[ReferencedT], ): referenced_astnode: ClassVar[Type[qlast.ObjectDDL]] @classmethod def _cmd_tree_from_ast(", "in implicit_bases ] pnames = '\\n- '.join( p.get_verbosename(schema, with_parent=True) for", "marked as derived, to be consistent # with derive_subtype(). 
ref_create.set_attribute_value('is_derived',", "= alter_cmd_cls(classname=obj.get_name(schema)) cmd.add(alter_cmd) cmd = alter_cmd return delta, cmd class", "= True if transient: context.current().transient_derivation = True if preserve_path_id: context.current().preserve_path_id", "declared using the `overloaded` keyword because ' f'it is defined", "Instead, generate a command group # containing Alter(if_exists) and Create(if_not_exists)", "obj = obj.get_referrer(schema) object_stack.append(obj) else: obj = None cmd: sd.Command", "ref in child should no longer exist. ref_del_cmd = get_cmd(sd.DeleteObject,", "schema: s_schema.Schema, context: sd.CommandContext, parent: ReferencedObject, name: str ) ->", "assert obj is not None alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, type(obj))", "astnode else: return super()._get_ast(schema, context, parent_node=parent_node) def _create_begin( self, schema:", "astnode: qlast.ObjectDDL, parents: Any) -> sd.Command: cmd = cls(classname=cls._classname_from_ast(schema, astnode,", "= referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls) return referrer.del_classref(schema, refdict.attr, refname)", "with `subject = SchemaField` raise NotImplementedError def get_referrer(self, schema: s_schema.Schema)", "schema = self._delete_ref(schema, context, referrer) return schema def _delete_ref( self,", "None and not context.canonical: objcls = self.get_schema_metaclass() referrer = referrer_ctx.scls", "new ref to descendants of # our referrer. schema =", "-> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context) refctx = cls.get_referrer_context(context)", "fq_name, ref_base.get_name(schema)) refname = ref_field_type.get_key_for_name(schema, fq_name_in_child) parent_coll = ref_base.get_field_value(schema, referrer_field)", "derived_name derived_attrs['bases'] = so.ObjectList.create( schema, [self]) mcls = type(self) referrer_class", "would be the context of the `create/alter/etc type` command. \"\"\"", "cls, schema: s_schema.Schema, astnode: qlast.NamedDDL, base_name: str, referrer_name: str, context:", "f'{self.scls.get_verbosename(schema, with_parent=True)}: ' f'cannot be declared `overloaded` as there are", "get_subject(self, schema: s_schema.Schema) -> Optional[so.Object]: # NB: classes that inherit", "sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) referrer_cls = type(referrer) alter_cmd = get_cmd(sd.AlterObject,", "= True parent_cmd.add(cmd) schema = delta.apply(schema, context) derived: ReferencedT =", "if (not self.scls.get_is_final(schema) and isinstance(referrer, so.InheritingObject) and not context.canonical and", "= reftype.get_key_for(schema, self.scls) self_name = self.scls.get_name(schema) schema = referrer.del_classref(schema, refdict.attr,", "inherited from:\\n- {pnames}' ) alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, referrer_class) for", "ref_field_type = type(child).get_field(refdict.attr).type refname = ref_field_type.get_key_for_name( schema, parent_fq_refname) astnode =", "@classmethod def get_referrer_context_class( cls, ) -> Type[sd.ObjectCommandContext[so.Object]]: if cls._referrer_context_class is", "OF ANY KIND, either express or implied. 
# See the", "= self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) objcls", "See the License for the specific language governing permissions and", "_build_alter_cmd_stack( self, schema: s_schema.Schema, context: sd.CommandContext, scls: so.Object, *, referrer:", "as sd from . import inheriting from . import objects", "_classname_quals_from_name( cls, name: sn.SchemaName, ) -> Tuple[str, ...]: return ()", "child_referrer_bases: fq_name_in_child = self._classname_from_name( fq_name, ref_base.get_name(schema)) refname = ref_field_type.get_key_for_name(schema, fq_name_in_child)", "= referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for_name(schema, derived_name) refcoll = referrer.get_field_value(schema, refdict.attr)", "to in writing, software # distributed under the License is", "-> Optional[so.Object]: # NB: classes that inherit ReferencedObject define a", "-> Type[qlast.DDLOperation]: scls = self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases", "= astnode_cls(name=nref) assert isinstance(astnode, qlast.ObjectDDL) return astnode @classmethod def get_inherited_ref_name(cls,", "= super().get_verbosename(schema) if with_parent: subject = self.get_subject(schema) if subject is", "ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.AlterInheritingObject[ReferencedInheritingObjectT], ): @classmethod def _cmd_tree_from_ast( cls, schema: s_schema.Schema, astnode:", "Tuple[str, ...]: return () @classmethod def _classname_quals_from_name( cls, name: sn.SchemaName,", "refdict = type(referrer).get_refdict_for_class(mcls) implicit_bases = self._get_implicit_ref_bases( schema, context, referrer=referrer, referrer_field=refdict.attr,", "None, ) -> Optional[qlast.DDLOperation]: refctx = type(self).get_referrer_context(context) if refctx is", "or agreed to in writing, software # distributed under the", "has been declared as # explicitly inherited. declared_overloaded = so.SchemaField(", "sd.QualifiedObjectCommand[ReferencedT], metaclass=ReferencedObjectCommandMeta, ): @classmethod def get_referrer_context_class( cls, ) -> Type[sd.ObjectCommandContext[so.Object]]:", "schema: s_schema.Schema, context: sd.CommandContext, bases: Any, ) -> Sequence[str]: mcls", "not context.canonical: objcls = self.get_schema_metaclass() referrer = referrer_ctx.scls if isinstance(referrer,", "referrer_ctx = self.get_referrer_context(context) implicit_bases = None if referrer_ctx is not", "self.set_attribute_value('bases', bases) schema = super()._create_begin(schema, context) if referrer_ctx is not", "context, child): schema, cmd = self._propagate_ref_deletion( schema, context, refdict, child,", "= TypeVar('ReferencedT', bound='ReferencedObject') ReferencedInheritingObjectT = TypeVar('ReferencedInheritingObjectT', bound='ReferencedInheritingObject') class ReferencedObject(so.DerivableObject): #:", "): pass class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]): @classmethod def _classname_from_ast(cls, schema: s_schema.Schema, astnode:", "check until the application time. 
ref_create = ref_create_cmd.as_inherited_ref_cmd( schema, context,", "schema, context, referrer=referrer, referrer_field=refdict.attr, fq_name=self.classname, ) scls = self.get_object(schema, context)", "parent_cmd = cmd._build_alter_cmd_stack( schema, context, self, referrer=referrer) with context(sd.DeltaRootContext(schema=schema, op=delta)):", "is not None ancestry.append(bref) raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema, with_parent=True)} ' f'must", "= so.SchemaField( bool, default=False, compcoef=None, introspectable=False, inheritable=False, ephemeral=True, ) def", "Optional[Dict[str, Any]] = None, dctx: Optional[sd.CommandContext] = None, derived_name_base: Optional[str]", "mcls) assert issubclass(ref_create_cmd, CreateReferencedInheritingObject) assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject) refdict = referrer_cls.get_refdict_for_class(mcls)", "cmd.add(rebase_cmd) context = sd.CommandContext( modaliases={}, schema=schema, ) assert isinstance(cmd, sd.ObjectCommand)", "self.get_implicit_bases( schema, context, self.get_attribute_value('bases'), ) ] astnode.system_comment = ( f'inherited", "compliance with the License. # You may obtain a copy", "= reftype.get_key_for(schema, self.scls) r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, referrer_class) alter_cmdcls =", "# Indicates that the object has been declared as #", "generate a command group # containing Alter(if_exists) and Create(if_not_exists) #", "] = None def __new__(mcls, name: str, bases: Tuple[type, ...],", "sd.CommandContext, scls: ReferencedInheritingObject ) -> s_schema.Schema: rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject,", "objects. vn = scls.get_verbosename(schema, with_parent=True) parents = [ b.get_field_value(schema, refdict.backref_attr)", "def _get_ast( self, schema: s_schema.Schema, context: sd.CommandContext, *, parent_node: Optional[qlast.DDLOperation]", "deleted_bases.add(ctx.op.scls) implicit_bases -= deleted_bases if implicit_bases: # Cannot remove inherited", "referrer, *qualifiers, mark_derived=mark_derived, derived_name_base=derived_name_base) else: derived_name = name if self.get_name(schema)", "if cls._referrer_context_class is None: raise TypeError( f'referrer_context_class is not defined", "= sd.ObjectCommandMeta.get_command_class_or_die( inheriting.RebaseInheritingObject, type(self)) rebase_cmd = rebase_cmdcls( classname=derived_name, added_bases=added_bases, removed_bases=removed_bases,", "sd.CommandContext, bases: Any, ) -> Sequence[str]: mcls = self.get_schema_metaclass() default_base", "False, name: Optional[str] = None, **kwargs: Any, ) -> Tuple[s_schema.Schema,", "parent_item.get_is_final(schema)): implicit_bases.append(parent_item) return implicit_bases def get_ref_implicit_base_delta( self, schema: s_schema.Schema, context:", "context(sd.DeltaRootContext(schema=schema, op=delta)): schema = delta.apply(schema, context) return schema def derive_ref(", "inheriting.delta_bases( [b.get_name(schema) for b in child_bases], [b.get_name(schema) for b in", "referrer_cls) ref_create_cmd = get_cmd(sd.CreateObject, mcls) ref_alter_cmd = get_cmd(sd.AlterObject, mcls) ref_rebase_cmd", "not use this file except in compliance with the License.", "schema, implicit_bases, ) self.set_attribute_value('bases', bases) schema = super()._create_begin(schema, context) if", "= self.scls implicit_bases = [ b for b in scls.get_bases(schema).objects(schema)", "removed_bases, added_bases = 
self.get_ref_implicit_base_delta( schema, context, child_ref, implicit_bases) rebase_cmd_cls =", "so.InheritingObject, ) -> s_schema.Schema: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls)", "isinstance(referrer, so.InheritingObject) and not context.canonical and context.enable_recursion): # Propagate the", "self._get_implicit_ref_bases( schema, context, child, refdict.attr, name) cmd: sd.Command if child_ref.get_is_local(schema)", "not in implicit_bases ], ) else: bases = so.ObjectList.create( schema,", "you may not use this file except in compliance with", "else: if isinstance(self.astnode, (list, tuple)): return self.astnode[1] else: return self.astnode", "if not child.allow_ref_propagation(schema, context, refdict): continue alter = alter_cmd(classname=child.get_name(schema)) with", "if mark_derived: context.current().mark_derived = True if transient: context.current().transient_derivation = True", "- context.renamed_objs # This object is inherited from one or", "so.ObjectList) removed_bases, added_bases = inheriting.delta_bases( [b.get_name(schema) for b in old_bases.objects(schema)],", "inheriting.RebaseInheritingObject[ReferencedInheritingObjectT], ): implicit = struct.Field(bool, default=False) def apply( self, schema:", ") rename_cmd = rename_cmdcls._rename_cmd_from_ast( schema, astnode, context) alter_cmd.add(rename_cmd) return self._propagate_ref_op(schema,", "Optional[str] = None, **kwargs: Any, ) -> Tuple[s_schema.Schema, ReferencedT]: if", "implicit_bases class AlterReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.AlterInheritingObject[ReferencedInheritingObjectT], ): @classmethod def _cmd_tree_from_ast( cls,", "None: astnode = rename_cmdcls.astnode( new_name=qlast.ObjectRef( name=refname, ), ) rename_cmd =", "added_bases=added_bases, removed_bases=removed_bases, ) ref_alter_cmd = get_cmd(sd.AlterObject, mcls) cmd = ref_alter_cmd(classname=name)", "child_ref: ReferencedInheritingObjectT, ) -> Tuple[s_schema.Schema, sd.Command]: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls", "assert isinstance(bases, so.ObjectList) base_names = list(bases.names(schema)) # Filter out explicit", "context.current().inheritance_refdicts = refdict_whitelist if mark_derived: context.current().mark_derived = True if transient:", "is distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES", "assert isinstance(name, sn.Name) return name @classmethod def _classname_from_name( cls, name:", "r_alter_cmdcls( classname=d_referrer.get_name(schema)) with r_alter_cmd.new_context(schema, context, d_referrer): with d_alter_cmd.new_context(schema, context, descendant):", "-> s_schema.Schema: schema = super()._delete_innards(schema, context) referrer_ctx = self.get_referrer_context(context) if", "for b in implicit_bases ] pnames = '\\n- '.join( p.get_verbosename(schema,", "return self._propagate_ref_op(schema, context, scls, cb=_ref_rename) class DeleteReferencedInheritingObject( DeleteReferencedObjectCommand[ReferencedInheritingObjectT], inheriting.DeleteInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],", "sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls)) def _ref_rename(alter_cmd: sd.Command, refname: str) -> None:", "classname=d_referrer.get_name(schema)) with r_alter_cmd.new_context(schema, context, d_referrer): with d_alter_cmd.new_context(schema, context, descendant): cb(d_alter_cmd,", "self.get_object(schema, context) 


class ReferencedObject(so.DerivableObject):

    #: True if the object has an explicit definition and is not
    #: purely inherited.
    is_local = so.SchemaField(
        bool,
        default=False,
        inheritable=False,
        compcoef=0.909,
        reflection_method=so.ReflectionMethod.AS_LINK,
    )

    def get_subject(self, schema: s_schema.Schema) -> Optional[so.Object]:
        # NB: classes that inherit ReferencedObject define a `get_subject`
        # method dynamically, with `subject = SchemaField`
        raise NotImplementedError

    def get_referrer(self, schema: s_schema.Schema) -> Optional[so.Object]:
        return self.get_subject(schema)

    def delete(self, schema: s_schema.Schema) -> s_schema.Schema:
        cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.DeleteObject, type(self))
        cmd = cmdcls(classname=self.get_name(schema))
        context = sd.CommandContext(
            modaliases={},
            schema=schema,
            disable_dep_verification=True,
        )
        delta, parent_cmd = cmd._build_alter_cmd_stack(
            schema, context, self)
        parent_cmd.add(cmd)
        with context(sd.DeltaRootContext(schema=schema, op=delta)):
            schema = delta.apply(schema, context)
        return schema
    def derive_ref(
        self: ReferencedT,
        schema: s_schema.Schema,
        referrer: so.QualifiedObject,
        *qualifiers: str,
        mark_derived: bool = False,
        attrs: Optional[Dict[str, Any]] = None,
        dctx: Optional[sd.CommandContext] = None,
        derived_name_base: Optional[str] = None,
        inheritance_merge: bool = True,
        preserve_path_id: bool = False,
        refdict_whitelist: Optional[AbstractSet[str]] = None,
        transient: bool = False,
        name: Optional[str] = None,
        **kwargs: Any,
    ) -> Tuple[s_schema.Schema, ReferencedT]:
        if name is None:
            derived_name: str = self.get_derived_name(
                schema, referrer, *qualifiers,
                mark_derived=mark_derived,
                derived_name_base=derived_name_base)
        else:
            derived_name = name

        if self.get_name(schema) == derived_name:
            raise errors.SchemaError(
                f'cannot derive {self!r}({derived_name}) from itself')

        derived_attrs: Dict[str, object] = {}

        if attrs is not None:
            derived_attrs.update(attrs)

        derived_attrs['name'] = derived_name
        derived_attrs['bases'] = so.ObjectList.create(schema, [self])

        mcls = type(self)
        referrer_class = type(referrer)

        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for_name(schema, derived_name)
        refcoll = referrer.get_field_value(schema, refdict.attr)
        existing = refcoll.get(schema, refname, default=None)

        if existing is not None:
            cmdcls: Type[sd.Command] = \
                sd.ObjectCommandMeta.get_command_class_or_die(
                    sd.AlterObject, type(self))
        else:
            cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.CreateObject, type(self))

        cmd = cmdcls(classname=derived_name)

        for k, v in derived_attrs.items():
            cmd.set_attribute_value(k, v)

        if existing is not None:
            new_bases = derived_attrs['bases']
            old_bases = existing.get_bases(schema)

            if new_bases != old_bases:
                assert isinstance(new_bases, so.ObjectList)
                removed_bases, added_bases = inheriting.delta_bases(
                    [b.get_name(schema) for b in old_bases.objects(schema)],
                    [b.get_name(schema) for b in new_bases.objects(schema)],
                )

                rebase_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
                    inheriting.RebaseInheritingObject, type(self))
                rebase_cmd = rebase_cmdcls(
                    classname=derived_name,
                    added_bases=added_bases,
                    removed_bases=removed_bases,
                )

                cmd.add(rebase_cmd)

        context = sd.CommandContext(
            modaliases={},
            schema=schema,
        )

        assert isinstance(cmd, sd.ObjectCommand)

        delta, parent_cmd = cmd._build_alter_cmd_stack(
            schema, context, self, referrer=referrer)

        with context(sd.DeltaRootContext(schema=schema, op=delta)):
            if not inheritance_merge:
                context.current().inheritance_merge = False

            if refdict_whitelist is not None:
                context.current().inheritance_refdicts = refdict_whitelist

            if mark_derived:
                context.current().mark_derived = True

            if transient:
                context.current().transient_derivation = True

            if preserve_path_id:
                context.current().preserve_path_id = True

            parent_cmd.add(cmd)
            schema = delta.apply(schema, context)

        derived: ReferencedT = schema.get(derived_name)

        return schema, derived

    def get_verbosename(
        self,
        schema: s_schema.Schema,
        *,
        with_parent: bool = False,
    ) -> str:
        vn = super().get_verbosename(schema)
        if with_parent:
            subject = self.get_subject(schema)
            if subject is not None:
                pn = subject.get_verbosename(schema, with_parent=True)
                return f'{vn} of {pn}'
        return vn
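
# An illustrative sketch of how derive_ref() is typically driven (the
# names `lnk` and `src` below are hypothetical and not part of this
# module):
#
#     schema, derived = lnk.derive_ref(
#         schema,
#         src,                 # the referring object
#         mark_derived=True,
#         transient=True,      # do not persist the derivative
#     )
#
# derive_ref() computes a specialized name, builds a Create command (or
# an Alter with a Rebase, if the target already exists), wraps it in the
# appropriate Alter stack for the referrer, and applies the resulting
# delta immediately, returning the updated schema and the new object.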


class ReferencedInheritingObject(
    so.DerivableInheritingObject,
    ReferencedObject,
):

    # Indicates that the object has been declared as
    # explicitly inherited.
    declared_overloaded = so.SchemaField(
        bool,
        default=False,
        compcoef=None,
        introspectable=False,
        inheritable=False,
        ephemeral=True,
    )

    def get_implicit_bases(
        self: ReferencedInheritingObjectT,
        schema: s_schema.Schema,
    ) -> List[ReferencedInheritingObjectT]:
        return [
            b for b in self.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]


class ReferencedObjectCommandMeta(sd.ObjectCommandMeta):
    _transparent_adapter_subclass: ClassVar[bool] = True
    _referrer_context_class: Optional[
        Type[sd.ObjectCommandContext[so.Object]]
    ] = None

    def __new__(mcls,
                name: str,
                bases: Tuple[type, ...],
                clsdct: Dict[str, Any],
                *,
                referrer_context_class: Optional[
                    Type[sd.ObjectCommandContext[so.Object]]
                ] = None,
                **kwargs: Any,
                ) -> ReferencedObjectCommandMeta:
        cls = super().__new__(mcls, name, bases, clsdct, **kwargs)
        assert isinstance(cls, ReferencedObjectCommandMeta)
        if referrer_context_class is not None:
            cls._referrer_context_class = referrer_context_class
        return cls


class ReferencedObjectCommandBase(
    sd.QualifiedObjectCommand[ReferencedT],
    metaclass=ReferencedObjectCommandMeta,
):

    @classmethod
    def get_referrer_context_class(
        cls,
    ) -> Type[sd.ObjectCommandContext[so.Object]]:
        if cls._referrer_context_class is None:
            raise TypeError(
                f'referrer_context_class is not defined for {cls}')
        return cls._referrer_context_class

    @classmethod
    def get_referrer_context(
        cls,
        context: sd.CommandContext,
    ) -> Optional[sd.ObjectCommandContext[so.Object]]:
        """Get the context of the command for the referring object, if any.

        E.g. for a `create/alter/etc concrete link` command this would
        be the context of the `create/alter/etc type` command.
        """
        ctx = context.get(cls.get_referrer_context_class())  # type: ignore
        return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx)

    @classmethod
    def get_referrer_context_or_die(
        cls,
        context: sd.CommandContext,
    ) -> sd.ObjectCommandContext[so.Object]:
        ctx = cls.get_referrer_context(context)
        if ctx is None:
            raise RuntimeError(f'no referrer context for {cls}')
        return ctx


class StronglyReferencedObjectCommand(
    ReferencedObjectCommandBase[ReferencedT]
):
    pass


class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]):

    @classmethod
    def _classname_from_ast(cls,
                            schema: s_schema.Schema,
                            astnode: qlast.NamedDDL,
                            context: sd.CommandContext
                            ) -> sn.Name:
        name = super()._classname_from_ast(schema, astnode, context)

        parent_ctx = cls.get_referrer_context(context)
        if parent_ctx is not None:
            assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand)
            referrer_name = parent_ctx.op.classname

            base_name: str
            try:
                base_ref = utils.ast_to_object(
                    astnode.name,
                    modaliases=context.modaliases,
                    schema=schema,
                )
            except errors.InvalidReferenceError:
                base_name = sn.Name(name)
            else:
                base_name = base_ref.get_name(schema)

            quals = cls._classname_quals_from_ast(
                schema, astnode, base_name, referrer_name, context)
            pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
            name = sn.Name(name=pnn, module=referrer_name.module)

        assert isinstance(name, sn.Name)
        return name

    @classmethod
    def _classname_from_name(
        cls,
        name: sn.SchemaName,
        referrer_name: sn.SchemaName,
    ) -> sn.Name:
        base_name = sn.shortname_from_fullname(name)
        quals = cls._classname_quals_from_name(name)
        pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
        return sn.Name(name=pnn, module=referrer_name.module)

    @classmethod
    def _classname_quals_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        base_name: str,
        referrer_name: str,
        context: sd.CommandContext,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _classname_quals_from_name(
        cls,
        name: sn.SchemaName,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _name_qual_from_exprs(cls,
                              schema: s_schema.Schema,
                              exprs: Iterable[str]) -> str:
        m = hashlib.sha1()
        for expr in exprs:
            m.update(expr.encode())
        return m.hexdigest()
    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        subject_ctx = self.get_referrer_context(context)
        ref_astnode: Type[qlast.DDLOperation] = getattr(
            self, 'referenced_astnode', None)
        if subject_ctx is not None and ref_astnode is not None:
            return ref_astnode
        else:
            if isinstance(self.astnode, (list, tuple)):
                return self.astnode[1]
            else:
                return self.astnode

    def _build_alter_cmd_stack(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        scls: so.Object,
        *,
        referrer: Optional[so.Object] = None,
    ) -> Tuple[sd.DeltaRoot, sd.Command]:

        delta = sd.DeltaRoot()

        if referrer is None:
            assert isinstance(scls, ReferencedObject)
            referrer = scls.get_referrer(schema)

        obj = referrer
        object_stack = []

        if type(self) != type(referrer):
            object_stack.append(referrer)

        while obj is not None:
            if isinstance(obj, ReferencedObject):
                obj = obj.get_referrer(schema)
                object_stack.append(obj)
            else:
                obj = None

        cmd: sd.Command = delta
        for obj in reversed(object_stack):
            assert obj is not None
            alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, type(obj))
            alter_cmd = alter_cmd_cls(classname=obj.get_name(schema))
            cmd.add(alter_cmd)
            cmd = alter_cmd

        return delta, cmd

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return super()._create_innards(schema, context)
        else:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
            return super()._create_innards(schema, context)

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        referrer_cls = type(referrer)
        mcls = type(self.scls)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        schema = referrer.add_classref(schema, refdict.attr, self.scls)
        return schema
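
# _build_alter_cmd_stack() walks the referrer chain upwards and produces
# a delta of nested Alter commands, so that a command on a deeply
# referenced object (e.g. a pointer on a link on a type) is applied in
# the proper enclosing context.  Schematically (the class and object
# names are hypothetical):
#
#     DeltaRoot
#     └── AlterObjectType(classname='default::SomeType')
#         └── AlterLink(classname='default::some_link')
#             └── <the command being staged>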


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):

    def _delete_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super()._delete_innards(schema, context)
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return schema
        else:
            referrer = referrer_ctx.scls
            schema = self._delete_ref(schema, context, referrer)
            return schema

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        return referrer.del_classref(schema, refdict.attr, refname)


class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> sd.Command:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)
        if isinstance(astnode, cls.referenced_astnode):
            objcls = cls.get_schema_metaclass()

            referrer_ctx = cls.get_referrer_context_or_die(context)
            referrer_class = referrer_ctx.op.get_schema_metaclass()
            referrer_name = referrer_ctx.op.classname
            refdict = referrer_class.get_refdict_for_class(objcls)

            cmd.set_attribute_value(
                refdict.backref_attr,
                so.ObjectShell(
                    name=referrer_name,
                    schemaclass=referrer_class,
                ),
            )

            cmd.set_attribute_value('is_local', True)

            if getattr(astnode, 'is_abstract', None):
                cmd.set_attribute_value('is_abstract', True)

        return cmd

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        scls = self.get_object(schema, context)
        assert isinstance(scls, ReferencedInheritingObject)
        implicit_bases = scls.get_implicit_bases(schema)
        if implicit_bases and not context.declarative:
            mcls = self.get_schema_metaclass()
            Alter = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, mcls)
            alter = Alter(classname=self.classname)
            return alter._get_ast_node(schema, context)
        else:
            return super()._get_ast_node(schema, context)

    @classmethod
    def as_inherited_ref_cmd(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             astnode: qlast.ObjectDDL,
                             parents: Any) -> sd.Command:
        cmd = cls(classname=cls._classname_from_ast(schema, astnode, context))
        cmd.set_attribute_value('name', cmd.classname)
        return cmd

    @classmethod
    def as_inherited_ref_ast(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             name: str,
                             parent: ReferencedObject) -> qlast.ObjectDDL:
        nref = cls.get_inherited_ref_name(schema, context, parent, name)
        astnode_cls = cls.referenced_astnode
        astnode = astnode_cls(name=nref)
        assert isinstance(astnode, qlast.ObjectDDL)
        return astnode

    @classmethod
    def get_inherited_ref_name(cls,
                               schema: s_schema.Schema,
                               context: sd.CommandContext,
                               parent: ReferencedObject,
                               name: str
                               ) -> qlast.ObjectRef:
        # reduce name to shortname
        if sn.Name.is_qualified(name):
            shortname: str = sn.shortname_from_fullname(sn.Name(name))
        else:
            shortname = name

        nref = qlast.ObjectRef(
            name=shortname,
            module=parent.get_shortname(schema).module,
        )

        return nref


class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_implicit_ref_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
        referrer_field: str,
        fq_name: sn.SchemaName,
    ) -> List[ReferencedInheritingObjectT]:

        assert isinstance(referrer, so.QualifiedObject)
        child_referrer_bases = referrer.get_bases(schema).objects(schema)
        implicit_bases = []
        ref_field_type = type(referrer).get_field(referrer_field).type

        for ref_base in child_referrer_bases:
            fq_name_in_child = self._classname_from_name(
                fq_name, ref_base.get_name(schema))
            refname = ref_field_type.get_key_for_name(
                schema, fq_name_in_child)
            parent_coll = ref_base.get_field_value(schema, referrer_field)
            parent_item = parent_coll.get(schema, refname, default=None)
            if (parent_item is not None
                    and not parent_item.get_is_final(schema)):
                implicit_bases.append(parent_item)

        return implicit_bases

    def get_ref_implicit_base_delta(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refcls: ReferencedInheritingObjectT,
        implicit_bases: List[ReferencedInheritingObjectT],
    ) -> inheriting.BaseDelta_T:
        child_bases = refcls.get_bases(schema).objects(schema)

        default_base = refcls.get_default_base_name()
        explicit_bases = [
            b for b in child_bases
            if b.generic(schema) and b.get_name(schema) != default_base
        ]

        new_bases = implicit_bases + explicit_bases
        return inheriting.delta_bases(
            [b.get_name(schema) for b in child_bases],
            [b.get_name(schema) for b in new_bases],
        )
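
    # For example (hypothetical schema): if `Child` extends `Parent` and
    # both define a pointer `ptr`, then for the `ptr` reference on
    # `Child`, _get_implicit_ref_bases() returns the `ptr` defined on
    # `Parent`, and get_ref_implicit_base_delta() computes the base list
    # changes needed to put those implicit bases (plus any explicit
    # non-default bases) in front of the reference's current bases.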
    def _validate(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext
    ) -> None:
        scls = self.scls
        implicit_bases = [
            b for b in scls.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]

        referrer_ctx = self.get_referrer_context_or_die(context)
        objcls = self.get_schema_metaclass()
        referrer_class = referrer_ctx.op.get_schema_metaclass()
        refdict = referrer_class.get_refdict_for_class(objcls)

        if context.declarative and scls.get_is_local(schema):
            if (implicit_bases
                    and refdict.requires_explicit_overloaded
                    and not self.get_attribute_value('declared_overloaded')):

                ancestry = []

                for obj in implicit_bases:
                    bref = obj.get_referrer(schema)
                    assert bref is not None
                    ancestry.append(bref)

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)} '
                    f'must be declared using the `overloaded` keyword '
                    f'because it is defined in the following ancestor(s): '
                    f'{", ".join(a.get_shortname(schema) for a in ancestry)}',
                    context=self.source_context,
                )
            elif (not implicit_bases
                    and self.get_attribute_value('declared_overloaded')):

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)}: '
                    f'cannot be declared `overloaded` as there are no '
                    f'ancestors defining it.',
                    context=self.source_context,
                )
    def _propagate_ref_op(self,
                          schema: s_schema.Schema,
                          context: sd.CommandContext,
                          scls: ReferencedInheritingObject,
                          cb: Callable[[sd.Command, str], None]
                          ) -> s_schema.Schema:
        rec = context.current().enable_recursion
        context.current().enable_recursion = False

        referrer_ctx = self.get_referrer_context_or_die(context)
        referrer = referrer_ctx.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, referrer_class)
        alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, mcls)

        for descendant in scls.ordered_descendants(schema):
            d_name = descendant.get_name(schema)
            assert isinstance(descendant, ReferencedObject)
            d_referrer = descendant.get_referrer(schema)
            assert d_referrer is not None
            d_alter_cmd = alter_cmdcls(classname=d_name)
            r_alter_cmd = r_alter_cmdcls(
                classname=d_referrer.get_name(schema))

            with r_alter_cmd.new_context(schema, context, d_referrer):
                with d_alter_cmd.new_context(schema, context, descendant):
                    cb(d_alter_cmd, refname)

            r_alter_cmd.add(d_alter_cmd)
            schema = r_alter_cmd.apply(schema, context)
            self.add(r_alter_cmd)

        context.current().enable_recursion = rec

        return schema
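
# _propagate_ref_op() is the generic "apply this to every descendant"
# driver: for each descendant of the target it synthesizes an
# Alter(<descendant's referrer>) wrapping an Alter(<descendant>),
# invokes the supplied callback to populate the inner command (see
# _propagate_ref_rename() further below for a concrete callback),
# applies it, and records it on self.  Recursion is disabled while
# doing so to avoid re-triggering propagation from the nested commands.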


class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_ast(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        *,
        parent_node: Optional[qlast.DDLOperation] = None,
    ) -> Optional[qlast.DDLOperation]:
        refctx = type(self).get_referrer_context(context)
        if refctx is not None:
            if not self.get_attribute_value('is_local'):
                if context.descriptive_mode:
                    astnode = super()._get_ast(
                        schema,
                        context,
                        parent_node=parent_node,
                    )
                    assert astnode is not None

                    inherited_from = [
                        sn.quals_from_fullname(b)[0]
                        for b in self.get_implicit_bases(
                            schema,
                            context,
                            self.get_attribute_value('bases'),
                        )
                    ]

                    astnode.system_comment = (
                        f'inherited from {", ".join(inherited_from)}'
                    )

                    return astnode
                else:
                    return None
            else:
                astnode = super()._get_ast(
                    schema, context, parent_node=parent_node)

                scls = self.get_object(schema, context)
                assert isinstance(scls, ReferencedInheritingObject)
                implicit_bases = scls.get_implicit_bases(schema)
                objcls = self.get_schema_metaclass()
                referrer_class = refctx.op.get_schema_metaclass()
                refdict = referrer_class.get_refdict_for_class(objcls)
                if refdict.requires_explicit_overloaded and implicit_bases:
                    assert astnode is not None
                    astnode.declared_overloaded = True

                return astnode
        else:
            return super()._get_ast(schema, context, parent_node=parent_node)
    def _create_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        implicit_bases = None

        if referrer_ctx is not None and not context.canonical:
            objcls = self.get_schema_metaclass()
            referrer = referrer_ctx.scls

            if isinstance(referrer, so.InheritingObject):
                referrer_class = referrer_ctx.op.get_schema_metaclass()
                refdict = referrer_class.get_refdict_for_class(objcls)
                implicit_bases = self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self.classname)

                if implicit_bases:
                    bases = self.get_attribute_value('bases')
                    if bases:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases + [
                                b for b in bases.objects(schema)
                                if b not in implicit_bases
                            ],
                        )
                    else:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases,
                        )

                    self.set_attribute_value('bases', bases)

        schema = super()._create_begin(schema, context)

        if referrer_ctx is not None and not context.canonical:
            self._validate(schema, context)

        return schema

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        schema = super()._create_ref(schema, context, referrer)

        if (not self.scls.get_is_final(schema)
                and isinstance(referrer, so.InheritingObject)
                and not context.canonical
                and context.enable_recursion):
            # Propagate the creation of a new ref to descendants of
            # our referrer.
            schema = self._propagate_ref_creation(schema, context, referrer)

        return schema

    def _propagate_ref_creation(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
    ) -> s_schema.Schema:

        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die

        mcls = type(self.scls)
        referrer_cls = type(referrer)
        alter_cmd = get_cmd(sd.AlterObject, referrer_cls)
        ref_create_cmd = get_cmd(sd.CreateObject, mcls)
        ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
        ref_rebase_cmd = get_cmd(inheriting.RebaseInheritingObject, mcls)
        assert issubclass(ref_create_cmd, CreateReferencedInheritingObject)
        assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        parent_fq_refname = self.scls.get_name(schema)

        for child in referrer.children(schema):
            if not child.allow_ref_propagation(schema, context, refdict):
                continue

            alter = alter_cmd(classname=child.get_name(schema))
            with alter.new_context(schema, context, child):
                # This is needed to get the correct inherited name which
                # will either be created or rebased.
                ref_field_type = type(child).get_field(refdict.attr).type
                refname = ref_field_type.get_key_for_name(
                    schema, parent_fq_refname)

                astnode = ref_create_cmd.as_inherited_ref_ast(
                    schema, context, refname, self.scls)
                fq_name = self._classname_from_ast(schema, astnode, context)

                # We cannot check for ref existence at this time,
                # because it might get created in a sibling branch
                # of the delta tree.  Instead, generate a command group
                # containing Alter(if_exists) and Create(if_not_exists)
                # to postpone that check until the application time.
                ref_create = ref_create_cmd.as_inherited_ref_cmd(
                    schema, context, astnode, [self.scls])
                ref_create.if_not_exists = True

                ref_create.set_attribute_value(refdict.backref_attr, child)

                if child.get_is_derived(schema):
                    # All references in a derived object must
                    # also be marked as derived, to be consistent
                    # with derive_subtype().
                    ref_create.set_attribute_value('is_derived', True)

                ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True)
                ref_alter.add(ref_rebase_cmd(
                    classname=fq_name,
                    implicit=True,
                    added_bases=(),
                    removed_bases=(),
                ))

                alter.add(ref_alter)
                alter.add(ref_create)

            self.add(alter)

        return schema

    def get_implicit_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        bases: Any,
    ) -> Sequence[str]:

        mcls = self.get_schema_metaclass()
        default_base = mcls.get_default_base_name()

        if isinstance(bases, so.ObjectCollectionShell):
            base_names = [
                b.name for b in bases.items if b.name is not None
            ]
        else:
            assert isinstance(bases, so.ObjectList)
            base_names = list(bases.names(schema))

        # Filter out explicit bases
        implicit_bases = [
            b
            for b in base_names
            if (
                b != default_base
                and isinstance(b, sn.SchemaName)
                and sn.shortname_from_fullname(b) != b
            )
        ]

        return implicit_bases
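
# The Alter(if_exists)/Create(if_not_exists) pairing above makes the
# generated per-child command group safe regardless of what sibling
# branches of the delta tree do; schematically (command names are
# placeholders for the concrete classes resolved via get_cmd()):
#
#     AlterObject(child)
#     ├── AlterRef(fq_name, if_exists=True)
#     │   └── RebaseRef(fq_name, implicit=True)
#     └── CreateRef(fq_name, if_not_exists=True)
#
# Exactly one of the two inner branches takes effect at application
# time, depending on whether the inherited ref already exists by then.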


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)
        refctx = cls.get_referrer_context(context)
        if refctx is not None:
            cmd.set_attribute_value('is_local', True)
        assert isinstance(cmd, AlterReferencedInheritingObject)
        return cmd

    def _alter_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        scls = self.scls
        was_local = scls.get_is_local(schema)
        schema = super()._alter_begin(schema, context)
        now_local = scls.get_is_local(schema)
        if not was_local and now_local:
            self._validate(schema, context)
        return schema


class RebaseReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.RebaseInheritingObject[ReferencedInheritingObjectT],
):

    implicit = struct.Field(bool, default=False)

    def apply(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        if not context.canonical and self.implicit:
            mcls = self.get_schema_metaclass()
            refctx = self.get_referrer_context_or_die(context)
            referrer = refctx.scls
            assert isinstance(referrer, so.InheritingObject)
            refdict = type(referrer).get_refdict_for_class(mcls)

            implicit_bases = self._get_implicit_ref_bases(
                schema,
                context,
                referrer=referrer,
                referrer_field=refdict.attr,
                fq_name=self.classname,
            )

            scls = self.get_object(schema, context)
            removed_bases, added_bases = self.get_ref_implicit_base_delta(
                schema,
                context,
                scls,
                implicit_bases=implicit_bases,
            )

            self.added_bases = added_bases
            self.removed_bases = removed_bases

        return super().apply(schema, context)


class RenameReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    sd.RenameObject,
):

    def _rename_begin(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> s_schema.Schema:
        orig_schema = schema
        schema = super()._rename_begin(schema, context)
        scls = self.scls

        if not context.canonical and not scls.generic(schema):
            implicit_bases = scls.get_implicit_bases(schema)
            non_renamed_bases = set(implicit_bases) - context.renamed_objs

            # This object is inherited from one or more ancestors that
            # are not renamed in the same op, and this is an error.
            if non_renamed_bases:
                bases_str = ', '.join(
                    b.get_verbosename(schema, with_parent=True)
                    for b in non_renamed_bases
                )

                verb = 'are' if len(non_renamed_bases) > 1 else 'is'
                vn = scls.get_verbosename(orig_schema)

                raise errors.SchemaDefinitionError(
                    f'cannot rename inherited {vn}',
                    details=(
                        f'{vn} is inherited from '
                        f'{bases_str}, which {verb} not being renamed'
                    ),
                    context=self.source_context,
                )

            if context.enable_recursion:
                schema = self._propagate_ref_rename(schema, context, scls)

        else:
            for op in self.get_subcommands(type=sd.ObjectCommand):
                schema = op.apply(schema, context)

        return schema

    def _propagate_ref_rename(self,
                              schema: s_schema.Schema,
                              context: sd.CommandContext,
                              scls: ReferencedInheritingObject
                              ) -> s_schema.Schema:
        rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.RenameObject, type(scls))

        def _ref_rename(alter_cmd: sd.Command,
                        refname: str) -> None:
            astnode = rename_cmdcls.astnode(
                new_name=qlast.ObjectRef(
                    name=refname,
                ),
            )

            rename_cmd = rename_cmdcls._rename_cmd_from_ast(
                schema, astnode, context)

            alter_cmd.add(rename_cmd)

        return self._propagate_ref_op(schema, context, scls, cb=_ref_rename)
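
# Renames thus reuse the same propagation machinery as other ref ops:
# _propagate_ref_rename() hands _propagate_ref_op() a callback that adds
# a RenameObject subcommand (built from a synthetic AST node carrying
# the new reference name) to each descendant's Alter command.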
import utils ReferencedT = TypeVar('ReferencedT', bound='ReferencedObject') ReferencedInheritingObjectT", "s_schema.Schema) -> Optional[so.Object]: # NB: classes that inherit ReferencedObject define", "in ancestry)}', context=self.source_context, ) elif (not implicit_bases and self.get_attribute_value('declared_overloaded')): raise", "schema, implicit_bases + [ b for b in bases.objects(schema) if", "new_bases = implicit_bases + explicit_bases return inheriting.delta_bases( [b.get_name(schema) for b", "implicit_bases -= deleted_bases if implicit_bases: # Cannot remove inherited objects.", "referrer_name = parent_ctx.op.classname base_name: str try: base_ref = utils.ast_to_object( astnode.name,", "qlast from . import delta as sd from . import", "schema: s_schema.Schema, astnode: qlast.NamedDDL, context: sd.CommandContext ) -> sn.Name: name", "the EdgeDB authors. # # Licensed under the Apache License,", "itself') derived_attrs: Dict[str, object] = {} if attrs is not", "= derived_attrs['bases'] old_bases = existing.get_bases(schema) if new_bases != old_bases: assert", "sd.DeltaRoot() if referrer is None: assert isinstance(scls, ReferencedObject) referrer =", "this # time, because it might get created in a", "is not None: context.current().inheritance_refdicts = refdict_whitelist if mark_derived: context.current().mark_derived =", "= cls.referenced_astnode astnode = astnode_cls(name=nref) assert isinstance(astnode, qlast.ObjectDDL) return astnode", "in referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll = child.get_field_value(schema, refdict.attr) fq_refname_in_child", "of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless", "bases) schema = super()._create_begin(schema, context) if referrer_ctx is not None", "= reftype.get_key_for_name( schema, fq_refname_in_child) existing = child_coll.get(schema, child_refname, None) if", "= ref_base.get_field_value(schema, referrer_field) parent_item = parent_coll.get(schema, refname, default=None) if (parent_item", "context: sd.CommandContext, refcls: ReferencedInheritingObjectT, implicit_bases: List[ReferencedInheritingObjectT], ) -> inheriting.BaseDelta_T: child_bases", "s_schema.Schema, astnode: qlast.DDLOperation, context: sd.CommandContext, ) -> sd.Command: cmd =", ") assert isinstance(cmd, sd.ObjectCommand) delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context,", "= parent_coll.get(schema, refname, default=None) if (parent_item is not None and", "continue alter = alter_cmd(classname=child.get_name(schema)) with alter.new_context(schema, context, child): # This", "is not None: assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand) referrer_name = parent_ctx.op.classname base_name:", "_create_begin( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: referrer_ctx", "s_schema.Schema) -> Optional[so.Object]: return self.get_subject(schema) def delete(self, schema: s_schema.Schema) ->", "= super()._create_begin(schema, context) if referrer_ctx is not None and not", "referrer_ctx.op.classname refdict = referrer_class.get_refdict_for_class(objcls) cmd.set_attribute_value( refdict.backref_attr, so.ObjectShell( name=referrer_name, schemaclass=referrer_class, ),", "AlterReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.AlterInheritingObject[ReferencedInheritingObjectT], ): @classmethod def _cmd_tree_from_ast( cls, schema: s_schema.Schema,", "s_schema.Schema: if not context.canonical and self.implicit: mcls = 
class ReferencedObject(so.DerivableObject):

    #: True if the object has an explicit definition and is not
    #: purely inherited.
    is_local = so.SchemaField(
        bool,
        default=False,
        inheritable=False,
        compcoef=0.909,
        reflection_method=so.ReflectionMethod.AS_LINK,
    )

    def get_subject(self, schema: s_schema.Schema) -> Optional[so.Object]:
        # NB: classes that inherit ReferencedObject define a `get_subject`
        # method dynamically, with `subject = SchemaField`
        raise NotImplementedError

    def get_referrer(self, schema: s_schema.Schema) -> Optional[so.Object]:
        return self.get_subject(schema)

    def delete(self, schema: s_schema.Schema) -> s_schema.Schema:
        cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.DeleteObject, type(self))

        cmd = cmdcls(classname=self.get_name(schema))

        context = sd.CommandContext(
            modaliases={},
            schema=schema,
            disable_dep_verification=True,
        )

        delta, parent_cmd = cmd._build_alter_cmd_stack(
            schema, context, self)

        parent_cmd.add(cmd)

        with context(sd.DeltaRootContext(schema=schema, op=delta)):
            schema = delta.apply(schema, context)

        return schema

    def derive_ref(
        self: ReferencedT,
        schema: s_schema.Schema,
        referrer: so.QualifiedObject,
        *qualifiers: str,
        mark_derived: bool = False,
        attrs: Optional[Dict[str, Any]] = None,
        dctx: Optional[sd.CommandContext] = None,
        derived_name_base: Optional[str] = None,
        inheritance_merge: bool = True,
        preserve_path_id: Optional[bool] = None,
        refdict_whitelist: Optional[AbstractSet[str]] = None,
        transient: bool = False,
        name: Optional[str] = None,
        **kwargs: Any,
    ) -> Tuple[s_schema.Schema, ReferencedT]:
        if name is None:
            derived_name: str = self.get_derived_name(
                schema, referrer, *qualifiers,
                mark_derived=mark_derived,
                derived_name_base=derived_name_base)
        else:
            derived_name = name

        if self.get_name(schema) == derived_name:
            raise errors.SchemaError(
                f'cannot derive {self!r}({derived_name}) from itself')

        derived_attrs: Dict[str, object] = {}

        if attrs is not None:
            derived_attrs.update(attrs)

        derived_attrs['name'] = derived_name
        derived_attrs['bases'] = so.ObjectList.create(schema, [self])

        mcls = type(self)
        referrer_class = type(referrer)

        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for_name(schema, derived_name)
        refcoll = referrer.get_field_value(schema, refdict.attr)
        existing = refcoll.get(schema, refname, default=None)

        if existing is not None:
            cmdcls: Type[sd.Command] = \
                sd.ObjectCommandMeta.get_command_class_or_die(
                    sd.AlterObject, type(self))
        else:
            cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.CreateObject, type(self))

        cmd = cmdcls(classname=derived_name)

        for k, v in derived_attrs.items():
            cmd.set_attribute_value(k, v)

        if existing is not None:
            new_bases = derived_attrs['bases']
            old_bases = existing.get_bases(schema)

            if new_bases != old_bases:
                assert isinstance(new_bases, so.ObjectList)
                removed_bases, added_bases = inheriting.delta_bases(
                    [b.get_name(schema) for b in old_bases.objects(schema)],
                    [b.get_name(schema) for b in new_bases.objects(schema)],
                )

                rebase_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
                    inheriting.RebaseInheritingObject, type(self))

                rebase_cmd = rebase_cmd_cls(
                    classname=derived_name,
                    added_bases=added_bases,
                    removed_bases=removed_bases,
                )

                cmd.add(rebase_cmd)

        context = sd.CommandContext(
            modaliases={},
            schema=schema,
        )

        delta, parent_cmd = cmd._build_alter_cmd_stack(
            schema, context, self, referrer=referrer)

        with context(sd.DeltaRootContext(schema=schema, op=delta)):
            if not inheritance_merge:
                context.current().inheritance_merge = False

            if refdict_whitelist is not None:
                context.current().inheritance_refdicts = refdict_whitelist

            if mark_derived:
                context.current().mark_derived = True

            if transient:
                context.current().transient_derivation = True

            if preserve_path_id:
                context.current().preserve_path_id = True

            parent_cmd.add(cmd)
            schema = delta.apply(schema, context)

        derived: ReferencedT = schema.get(derived_name)

        return schema, derived

    def get_verbosename(
        self,
        schema: s_schema.Schema,
        *,
        with_parent: bool = False,
    ) -> str:
        vn = super().get_verbosename(schema)
        if with_parent:
            subject = self.get_subject(schema)
            if subject is not None:
                pn = subject.get_verbosename(schema, with_parent=True)
                return f'{vn} of {pn}'
        return vn
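# NOTE: Illustrative sketch added for exposition; it is not part of this
# module's API.  It mimics the shape of ``derive_ref`` above with plain
# Python: a derived copy is created under a specialized name that embeds
# the referrer, and the copy lists the original object as its sole base.
# The names ``_ToyObject`` and ``_toy_derive`` are hypothetical.
def _toy_derive_example() -> None:
    from dataclasses import dataclass, field

    @dataclass
    class _ToyObject:
        name: str
        bases: List['_ToyObject'] = field(default_factory=list)

    def _toy_derive(
        obj: _ToyObject,
        referrer: str,
        *quals: str,
    ) -> _ToyObject:
        # A specialized name embeds the referrer name (and any extra
        # qualifiers), loosely like get_derived_name() does for real
        # schema objects.
        derived_name = '@'.join((obj.name, referrer, *quals))
        if derived_name == obj.name:
            raise ValueError('cannot derive an object from itself')
        # The derived copy inherits from the object it was derived from.
        return _ToyObject(name=derived_name, bases=[obj])

    generic = _ToyObject(name='std::property')
    derived = _toy_derive(generic, 'default::User', 'name')
    assert derived.bases == [generic]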
class ReferencedInheritingObject(
    so.DerivableInheritingObject,
    ReferencedObject,
):

    # Indicates that the object has been declared as
    # explicitly inherited.
    declared_overloaded = so.SchemaField(
        bool,
        default=False,
        compcoef=None,
        introspectable=False,
        inheritable=False,
        ephemeral=True,
    )

    def get_implicit_bases(
        self: ReferencedInheritingObjectT,
        schema: s_schema.Schema,
    ) -> List[ReferencedInheritingObjectT]:
        return [
            b
            for b in self.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]


class ReferencedObjectCommandMeta(sd.ObjectCommandMeta):
    _transparent_adapter_subclass: ClassVar[bool] = True
    _referrer_context_class: Optional[
        Type[sd.ObjectCommandContext[so.Object]]
    ] = None

    def __new__(mcls,
                name: str,
                bases: Tuple[type, ...],
                clsdct: Dict[str, Any],
                *,
                referrer_context_class: Optional[
                    Type[sd.ObjectCommandContext[so.Object]]
                ] = None,
                **kwargs: Any,
                ) -> ReferencedObjectCommandMeta:
        cls = super().__new__(mcls, name, bases, clsdct, **kwargs)
        assert isinstance(cls, ReferencedObjectCommandMeta)
        if referrer_context_class is not None:
            cls._referrer_context_class = referrer_context_class
        return cls


class ReferencedObjectCommandBase(
    sd.QualifiedObjectCommand[ReferencedT],
    metaclass=ReferencedObjectCommandMeta,
):

    @classmethod
    def get_referrer_context_class(
        cls,
    ) -> Type[sd.ObjectCommandContext[so.Object]]:
        if cls._referrer_context_class is None:
            raise TypeError(
                f'referrer_context_class is not defined for {cls}')
        return cls._referrer_context_class

    @classmethod
    def get_referrer_context(
        cls,
        context: sd.CommandContext,
    ) -> Optional[sd.ObjectCommandContext[so.Object]]:
        """Get the context of the command for the referring object, if any.

        E.g. for a `create/alter/etc property` in a `create/alter link`
        command this would be the context of the `create/alter/etc type`
        command.
        """
        ctxcls = cls.get_referrer_context_class()
        ctx = context.get(ctxcls)  # type: ignore
        return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx)

    @classmethod
    def get_referrer_context_or_die(
        cls,
        context: sd.CommandContext,
    ) -> sd.ObjectCommandContext[so.Object]:
        ctx = cls.get_referrer_context(context)
        if ctx is None:
            raise RuntimeError(f'no referrer context for {cls}')
        return ctx


class StronglyReferencedObjectCommand(
    ReferencedObjectCommandBase[ReferencedT]
):
    pass


class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]):

    @classmethod
    def _classname_from_ast(cls,
                            schema: s_schema.Schema,
                            astnode: qlast.NamedDDL,
                            context: sd.CommandContext
                            ) -> sn.Name:
        name = super()._classname_from_ast(schema, astnode, context)

        parent_ctx = cls.get_referrer_context(context)
        if parent_ctx is not None:
            assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand)
            referrer_name = parent_ctx.op.classname
            base_name: str

            try:
                base_ref = utils.ast_to_object(
                    astnode.name,
                    modaliases=context.modaliases,
                    schema=schema,
                )
            except errors.InvalidReferenceError:
                base_name = sn.Name(name)
            else:
                base_name = base_ref.get_name(schema)

            quals = cls._classname_quals_from_ast(
                schema, astnode, base_name, referrer_name, context)
            pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
            name = sn.Name(name=pnn, module=referrer_name.module)

        assert isinstance(name, sn.Name)
        return name

    @classmethod
    def _classname_from_name(
        cls,
        name: sn.SchemaName,
        referrer_name: sn.SchemaName,
    ) -> sn.Name:
        base_name = sn.shortname_from_fullname(name)
        quals = cls._classname_quals_from_name(name)
        pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
        return sn.Name(name=pnn, module=referrer_name.module)

    @classmethod
    def _classname_quals_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.NamedDDL,
        base_name: str,
        referrer_name: str,
        context: sd.CommandContext,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _classname_quals_from_name(
        cls,
        name: sn.SchemaName,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _name_qual_from_exprs(cls,
                              schema: s_schema.Schema,
                              exprs: Iterable[str]) -> str:
        m = hashlib.sha1()
        for expr in exprs:
            m.update(expr.encode())
        return m.hexdigest()

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        subject_ctx = self.get_referrer_context(context)
        ref_astnode: Type[qlast.DDLOperation] = getattr(
            self, 'referenced_astnode', None)
        if subject_ctx is not None and ref_astnode is not None:
            return ref_astnode
        else:
            if isinstance(self.astnode, (list, tuple)):
                return self.astnode[1]
            else:
                return self.astnode

    def _build_alter_cmd_stack(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        scls: so.Object,
        *,
        referrer: Optional[so.Object] = None
    ) -> Tuple[sd.DeltaRoot, sd.Command]:

        delta = sd.DeltaRoot()

        if referrer is None:
            assert isinstance(scls, ReferencedObject)
            referrer = scls.get_referrer(schema)

        obj = referrer
        object_stack = []

        if type(self) != type(referrer):
            object_stack.append(referrer)

        while obj is not None:
            if isinstance(obj, ReferencedObject):
                obj = obj.get_referrer(schema)
                object_stack.append(obj)
            else:
                obj = None

        cmd: sd.Command = delta
        for obj in reversed(object_stack):
            assert obj is not None
            alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, type(obj))

            alter_cmd = alter_cmd_cls(classname=obj.get_name(schema))
            cmd.add(alter_cmd)
            cmd = alter_cmd

        return delta, cmd
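# NOTE: Illustrative sketch added for exposition; not part of this module.
# It shows the naming scheme that _classname_from_ast and
# _name_qual_from_exprs implement: the fully-qualified name of a referenced
# object is its short name specialized by the referrer's name plus optional
# qualifiers, and expression-derived qualifiers are stabilized with a SHA-1
# digest.  The helpers below are hypothetical simplifications of the real
# `sn` module.
def _toy_specialized_name_example() -> None:
    import hashlib as _hashlib

    def _toy_qual_from_exprs(exprs: Iterable[str]) -> str:
        # Mirrors _name_qual_from_exprs(): hash the expressions so that
        # the qualifier is deterministic and filename-safe.
        m = _hashlib.sha1()
        for expr in exprs:
            m.update(expr.encode())
        return m.hexdigest()

    def _toy_specialized_name(base: str, referrer: str, *quals: str) -> str:
        # Loosely mimics sn.get_specialized_name(): mangle the generic
        # name with the referrer name and any qualifiers.
        return '@'.join((base, referrer, *quals))

    qual = _toy_qual_from_exprs(['.name ++ "!"'])
    fqname = _toy_specialized_name('std::max', 'default::User', qual)
    assert fqname.startswith('std::max@default::User@')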
class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> sd.Command:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        if isinstance(astnode, cls.referenced_astnode):
            objcls = cls.get_schema_metaclass()

            referrer_ctx = cls.get_referrer_context_or_die(context)
            referrer_class = referrer_ctx.op.get_schema_metaclass()
            referrer_name = referrer_ctx.op.classname
            refdict = referrer_class.get_refdict_for_class(objcls)

            cmd.set_attribute_value(
                refdict.backref_attr,
                so.ObjectShell(
                    name=referrer_name,
                    schemaclass=referrer_class,
                ),
            )

            cmd.set_attribute_value('is_local', True)

            if getattr(astnode, 'is_abstract', None):
                cmd.set_attribute_value('is_abstract', True)

        return cmd

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        scls = self.get_object(schema, context)
        assert isinstance(scls, ReferencedInheritingObject)
        implicit_bases = scls.get_implicit_bases(schema)
        if implicit_bases and not context.declarative:
            mcls = self.get_schema_metaclass()
            Alter = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, mcls)
            alter = Alter(classname=self.classname)
            return alter._get_ast_node(schema, context)
        else:
            return super()._get_ast_node(schema, context)

    @classmethod
    def as_inherited_ref_cmd(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             astnode: qlast.ObjectDDL,
                             parents: Any) -> sd.Command:
        cmd = cls(classname=cls._classname_from_ast(schema, astnode, context))
        cmd.set_attribute_value('name', cmd.classname)
        return cmd

    @classmethod
    def as_inherited_ref_ast(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             name: str,
                             parent: ReferencedObject) -> qlast.ObjectDDL:
        nref = cls.get_inherited_ref_name(schema, context, parent, name)
        astnode_cls = cls.referenced_astnode
        astnode = astnode_cls(name=nref)
        assert isinstance(astnode, qlast.ObjectDDL)
        return astnode

    @classmethod
    def get_inherited_ref_name(cls,
                               schema: s_schema.Schema,
                               context: sd.CommandContext,
                               parent: ReferencedObject,
                               name: str
                               ) -> qlast.ObjectRef:
        # reduce name to shortname
        if sn.Name.is_qualified(name):
            shortname: str = sn.shortname_from_fullname(sn.Name(name))
        else:
            shortname = name

        nref = qlast.ObjectRef(
            name=shortname,
            module=parent.get_shortname(schema).module,
        )

        return nref

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return super()._create_innards(schema, context)
        else:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
            return super()._create_innards(schema, context)

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        referrer_cls = type(referrer)
        mcls = type(self.scls)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        schema = referrer.add_classref(schema, refdict.attr, self.scls)
        return schema


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):

    def _delete_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super()._delete_innards(schema, context)
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return schema
        else:
            referrer = referrer_ctx.scls
            schema = self._delete_ref(schema, context, referrer)
            return schema

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:

        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        return referrer.del_classref(schema, refdict.attr, refname)
class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_implicit_ref_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
        referrer_field: str,
        fq_name: sn.SchemaName,
    ) -> List[ReferencedInheritingObjectT]:

        assert isinstance(referrer, so.QualifiedObject)
        child_referrer_bases = referrer.get_bases(schema).objects(schema)
        implicit_bases = []
        ref_field_type = type(referrer).get_field(referrer_field).type

        for ref_base in child_referrer_bases:
            fq_name_in_child = self._classname_from_name(
                fq_name, ref_base.get_name(schema))
            refname = ref_field_type.get_key_for_name(
                schema, fq_name_in_child)
            parent_coll = ref_base.get_field_value(schema, referrer_field)
            parent_item = parent_coll.get(schema, refname, default=None)
            if (parent_item is not None
                    and not parent_item.get_is_final(schema)):
                implicit_bases.append(parent_item)

        return implicit_bases

    def get_ref_implicit_base_delta(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refcls: ReferencedInheritingObjectT,
        implicit_bases: List[ReferencedInheritingObjectT],
    ) -> inheriting.BaseDelta_T:
        child_bases = refcls.get_bases(schema).objects(schema)

        default_base = refcls.get_default_base_name()
        explicit_bases = [
            b for b in child_bases
            if b.generic(schema) and b.get_name(schema) != default_base
        ]

        new_bases = implicit_bases + explicit_bases
        return inheriting.delta_bases(
            [b.get_name(schema) for b in child_bases],
            [b.get_name(schema) for b in new_bases],
        )

    def _validate(self,
                  schema: s_schema.Schema,
                  context: sd.CommandContext
                  ) -> None:
        scls = self.scls
        implicit_bases = [
            b for b in scls.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]

        referrer_ctx = self.get_referrer_context_or_die(context)
        objcls = self.get_schema_metaclass()
        referrer_class = referrer_ctx.op.get_schema_metaclass()
        refdict = referrer_class.get_refdict_for_class(objcls)

        if context.declarative and scls.get_is_local(schema):
            if (implicit_bases
                    and refdict.requires_explicit_overloaded
                    and not self.get_attribute_value('declared_overloaded')):

                ancestry = [
                    obj.get_field_value(schema, refdict.backref_attr)
                    for obj in implicit_bases
                ]

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)} '
                    f'must be declared using the `overloaded` keyword because '
                    f'it is defined in the following ancestor(s): '
                    f'{", ".join(a.get_shortname(schema) for a in ancestry)}',
                    context=self.source_context,
                )
            elif (not implicit_bases
                    and self.get_attribute_value('declared_overloaded')):

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)}: '
                    f'cannot be declared `overloaded` as there are no '
                    f'ancestors defining it.',
                    context=self.source_context,
                )

    def _propagate_ref_op(self,
                          schema: s_schema.Schema,
                          context: sd.CommandContext,
                          scls: ReferencedInheritingObject,
                          cb: Callable[[sd.Command, str], None]
                          ) -> s_schema.Schema:

        rec = context.current().enable_recursion
        context.current().enable_recursion = False

        referrer_ctx = self.get_referrer_context_or_die(context)
        referrer = referrer_ctx.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, referrer_class)
        alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, mcls)

        for descendant in scls.ordered_descendants(schema):
            d_name = descendant.get_name(schema)
            assert isinstance(descendant, ReferencedObject)
            d_referrer = descendant.get_referrer(schema)
            assert d_referrer is not None
            d_alter_cmd = alter_cmdcls(classname=d_name)
            r_alter_cmd = r_alter_cmdcls(
                classname=d_referrer.get_name(schema))

            with r_alter_cmd.new_context(schema, context, d_referrer):
                with d_alter_cmd.new_context(schema, context, descendant):
                    cb(d_alter_cmd, refname)

                r_alter_cmd.add(d_alter_cmd)

            schema = r_alter_cmd.apply(schema, context)
            self.add(r_alter_cmd)

        context.current().enable_recursion = rec

        return schema
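# NOTE: Illustrative sketch added for exposition; not part of this module.
# It reproduces, with plain lists of names, the removed/added computation
# that get_ref_implicit_base_delta() above delegates to
# inheriting.delta_bases(): bases present in the old list but not the new
# one are removed, and vice versa.  The real delta_bases() also tracks
# base positions; `_toy_delta_bases` is a hypothetical simplification.
def _toy_delta_bases_example() -> None:
    def _toy_delta_bases(
        old_bases: List[str],
        new_bases: List[str],
    ) -> Tuple[List[str], List[str]]:
        removed = [b for b in old_bases if b not in new_bases]
        added = [b for b in new_bases if b not in old_bases]
        return removed, added

    removed, added = _toy_delta_bases(
        ['default::A', 'default::B'],
        ['default::B', 'default::C'],
    )
    assert removed == ['default::A']
    assert added == ['default::C']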
class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_ast(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        *,
        parent_node: Optional[qlast.DDLOperation] = None,
    ) -> Optional[qlast.DDLOperation]:
        refctx = type(self).get_referrer_context(context)
        if refctx is not None:
            if not self.get_attribute_value('is_local'):
                if context.descriptive_mode:
                    astnode = super()._get_ast(
                        schema,
                        context,
                        parent_node=parent_node,
                    )
                    assert astnode is not None

                    inherited_from = [
                        sn.quals_from_fullname(b)[0]
                        for b in self.get_implicit_bases(
                            schema,
                            context,
                            self.get_attribute_value('bases'),
                        )
                    ]

                    astnode.system_comment = (
                        f'inherited from {", ".join(inherited_from)}'
                    )

                    return astnode
                else:
                    return None
            else:
                astnode = super()._get_ast(
                    schema, context, parent_node=parent_node)

                if context.declarative:
                    scls = self.get_object(schema, context)
                    assert isinstance(scls, ReferencedInheritingObject)
                    implicit_bases = scls.get_implicit_bases(schema)
                    objcls = self.get_schema_metaclass()
                    referrer_class = refctx.op.get_schema_metaclass()
                    refdict = referrer_class.get_refdict_for_class(objcls)
                    if refdict.requires_explicit_overloaded and implicit_bases:
                        assert astnode is not None
                        astnode.declared_overloaded = True

                return astnode
        else:
            return super()._get_ast(schema, context, parent_node=parent_node)

    def _create_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        implicit_bases = None

        if referrer_ctx is not None and not context.canonical:
            objcls = self.get_schema_metaclass()
            referrer = referrer_ctx.scls

            if isinstance(referrer, so.InheritingObject):
                referrer_class = referrer_ctx.op.get_schema_metaclass()
                refdict = referrer_class.get_refdict_for_class(objcls)

                implicit_bases = self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self.classname)

                if implicit_bases:
                    bases = self.get_attribute_value('bases')
                    if bases:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases + [
                                b for b in bases.objects(schema)
                                if b not in implicit_bases
                            ],
                        )
                    else:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases,
                        )

                    self.set_attribute_value('bases', bases)

        schema = super()._create_begin(schema, context)

        if referrer_ctx is not None and not context.canonical:
            self._validate(schema, context)

        return schema

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        schema = super()._create_ref(schema, context, referrer)

        if (not self.scls.get_is_final(schema)
                and isinstance(referrer, so.InheritingObject)
                and not context.canonical
                and context.enable_recursion):
            # Propagate the creation of a new ref to descendants of
            # our referrer.
            schema = self._propagate_ref_creation(schema, context, referrer)

        return schema

    def _propagate_ref_creation(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
    ) -> s_schema.Schema:

        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die

        mcls = type(self.scls)
        referrer_cls = type(referrer)
        alter_cmd = get_cmd(sd.AlterObject, referrer_cls)
        ref_create_cmd = get_cmd(sd.CreateObject, mcls)
        ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
        ref_rebase_cmd = get_cmd(inheriting.RebaseInheritingObject, mcls)
        assert issubclass(ref_create_cmd, CreateReferencedInheritingObject)
        assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        parent_fq_refname = self.scls.get_name(schema)

        for child in referrer.children(schema):
            if not child.allow_ref_propagation(schema, context, refdict):
                continue

            alter = alter_cmd(classname=child.get_name(schema))
            with alter.new_context(schema, context, child):
                # This is needed to get the correct inherited name which
                # will either be created or rebased.
                ref_field_type = type(child).get_field(refdict.attr).type
                refname = ref_field_type.get_key_for_name(
                    schema, parent_fq_refname)

                astnode = ref_create_cmd.as_inherited_ref_ast(
                    schema, context, refname, self.scls)
                fq_name = self._classname_from_ast(schema, astnode, context)

                # We cannot check for ref existence in this child at this
                # time, because it might get created in a sibling branch
                # of the delta tree.  Instead, generate a command group
                # containing Alter(if_exists) and Create(if_not_exists)
                # to postpone that check until the application time.
                ref_create = ref_create_cmd.as_inherited_ref_cmd(
                    schema, context, astnode, [self.scls])
                ref_create.if_not_exists = True

                ref_create.set_attribute_value(refdict.backref_attr, child)

                if child.get_is_derived(schema):
                    # All references in a derived object must
                    # also be marked as derived, to be consistent
                    # with derive_subtype().
                    ref_create.set_attribute_value('is_derived', True)

                ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True)
                ref_alter.add(ref_rebase_cmd(
                    classname=fq_name,
                    implicit=True,
                    added_bases=(),
                    removed_bases=(),
                ))

                alter.add(ref_alter)
                alter.add(ref_create)

            self.add(alter)

        return schema

    def get_implicit_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        bases: Any,
    ) -> Sequence[str]:

        mcls = self.get_schema_metaclass()
        default_base = mcls.get_default_base_name()

        if isinstance(bases, so.ObjectCollectionShell):
            base_names = [
                b.name for b in bases.items if b.name
            ]
        else:
            assert isinstance(bases, so.ObjectList)
            base_names = list(bases.names(schema))

        # Filter out explicit bases
        implicit_bases = [
            b
            for b in base_names
            if (
                b != default_base
                and isinstance(b, sn.SchemaName)
                and sn.shortname_from_fullname(b) != b
            )
        ]

        return implicit_bases


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        refctx = cls.get_referrer_context(context)
        if refctx is not None:
            cmd.set_attribute_value('is_local', True)

        assert isinstance(cmd, AlterReferencedInheritingObject)
        return cmd

    def _alter_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        scls = self.scls
        was_local = scls.get_is_local(schema)
        schema = super()._alter_begin(schema, context)
        now_local = scls.get_is_local(schema)
        if not was_local and now_local:
            self._validate(schema, context)
        return schema
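# NOTE: Illustrative sketch added for exposition; not part of this module.
# It models the trick used in _propagate_ref_creation() above: since a ref
# may or may not exist in a child by the time the delta is applied, a
# command group containing both an Alter(if_exists) and a
# Create(if_not_exists) is emitted, so exactly one of the two takes effect
# at application time.  All names here are hypothetical toys.
def _toy_idempotent_group_example() -> None:
    registry: Dict[str, dict] = {}

    def _toy_alter_if_exists(name: str) -> None:
        # No-op unless the ref already exists; then "rebase" it.
        if name in registry:
            registry[name]['rebased'] = True

    def _toy_create_if_not_exists(name: str) -> None:
        # No-op if the ref already exists; otherwise create it.
        if name not in registry:
            registry[name] = {'rebased': False}

    # Applying the whole group is safe regardless of prior state.
    for op in (_toy_alter_if_exists, _toy_create_if_not_exists):
        op('child::ref')
    assert 'child::ref' in registry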
"derived_attrs.update(attrs) derived_attrs['name'] = derived_name derived_attrs['bases'] = so.ObjectList.create( schema, [self]) mcls", "= TypeVar('ReferencedInheritingObjectT', bound='ReferencedInheritingObject') class ReferencedObject(so.DerivableObject): #: True if the object", "that the object has been declared as # explicitly inherited.", "= name if self.get_name(schema) == derived_name: raise errors.SchemaError( f'cannot derive", "None, inheritance_merge: bool = True, preserve_path_id: Optional[bool] = None, refdict_whitelist:", "# are not renamed in the same op, and this", "s_schema.Schema, context: sd.CommandContext, parent: ReferencedObject, name: str ) -> qlast.ObjectRef:", "ReferencedObject) d_referrer = descendant.get_referrer(schema) assert d_referrer is not None d_alter_cmd", "referrer_cls = type(referrer) mcls = type(self.scls) refdict = referrer_cls.get_refdict_for_class(mcls) schema", "of {pn}' return vn class ReferencedInheritingObject( so.DerivableInheritingObject, ReferencedObject, ): #", "def _propagate_ref_creation( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, )", "schema, context, refname, self.scls) fq_name = self._classname_from_ast(schema, astnode, context) #", "= self.get_schema_metaclass() referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) if context.declarative", "is either defined locally or is inherited # from another", "schema.get(derived_name) return schema, derived def get_verbosename( self, schema: s_schema.Schema, *,", "_classname_quals_from_ast( cls, schema: s_schema.Schema, astnode: qlast.NamedDDL, base_name: str, referrer_name: str,", "s_schema.Schema: scls = self.scls was_local = scls.get_is_local(schema) schema = super()._alter_begin(schema,", "ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], sd.RenameObject, ): def _rename_begin(self, schema: s_schema.Schema, context: sd.CommandContext )", "'.join( b.get_verbosename(schema, with_parent=True) for b in non_renamed_bases ) verb =", "s_schema.Schema, ) -> List[ReferencedInheritingObjectT]: return [ b for b in", "] new_bases = implicit_bases + explicit_bases return inheriting.delta_bases( [b.get_name(schema) for", "be consistent # with derive_subtype(). 
ref_create.set_attribute_value('is_derived', True) ref_alter = ref_alter_cmd(classname=fq_name,", "qlast.NamedDDL, context: sd.CommandContext ) -> sn.Name: name = super()._classname_from_ast(schema, astnode,", "b in new_bases], ) def _validate( self, schema: s_schema.Schema, context:", "is inherited from one or more ancestors that # are", "sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: scls = self.scls referrer_class", "inherited from ' f'{bases_str}, which {verb} not being renamed' ),", "b.get_field_value(schema, refdict.backref_attr) for b in implicit_bases ] pnames = '\\n-", "return nref def _create_innards( self, schema: s_schema.Schema, context: sd.CommandContext, )", "in writing, software # distributed under the License is distributed", "so.InheritingObject) refdict = type(referrer).get_refdict_for_class(mcls) implicit_bases = self._get_implicit_ref_bases( schema, context, referrer=referrer,", "refdict.backref_attr) for b in implicit_bases ] pnames = '\\n- '.join(", "def _classname_from_name( cls, name: sn.SchemaName, referrer_name: sn.SchemaName, ) -> sn.Name:", "s_schema.Schema: rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls)) def _ref_rename(alter_cmd: sd.Command, refname:", "cmd._build_alter_cmd_stack( schema, context, self) parent_cmd.add(cmd) with context(sd.DeltaRootContext(schema=schema, op=delta)): schema =", "= self.get_referrer_context(context) implicit_bases = None if referrer_ctx is not None", "ReferencedInheritingObject, cb: Callable[[sd.Command, str], None] ) -> s_schema.Schema: rec =", "-> Optional[qlast.DDLOperation]: refctx = type(self).get_referrer_context(context) if refctx is not None:", "that check until the application time. ref_create = ref_create_cmd.as_inherited_ref_cmd( schema,", "is not defined for {cls}') return cls._referrer_context_class @classmethod def get_referrer_context(", "rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls)) def _ref_rename(alter_cmd: sd.Command, refname: str)", "s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject ) -> s_schema.Schema: rename_cmdcls =", "= self._classname_from_name( self_name, child.get_name(schema), ) child_refname = reftype.get_key_for_name( schema, fq_refname_in_child)", "cmd: sd.Command = delta for obj in reversed(object_stack): assert obj", "r_alter_cmd = r_alter_cmdcls( classname=d_referrer.get_name(schema)) with r_alter_cmd.new_context(schema, context, d_referrer): with d_alter_cmd.new_context(schema,", "= sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) for descendant in scls.ordered_descendants(schema): d_name =", "referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll = child.get_field_value(schema, refdict.attr) fq_refname_in_child =", "object_stack.append(referrer) while obj is not None: if isinstance(obj, ReferencedObject): obj", "License is distributed on an \"AS IS\" BASIS, # WITHOUT", "parent_item = parent_coll.get(schema, refname, default=None) if (parent_item is not None", "License, Version 2.0 (the \"License\"); # you may not use", "cmdcls(classname=self.get_name(schema)) context = sd.CommandContext( modaliases={}, schema=schema, disable_dep_verification=True, ) delta, parent_cmd", "schema: s_schema.Schema) -> Optional[so.Object]: return self.get_subject(schema) def delete(self, schema: s_schema.Schema)", "= False, ) -> str: vn = super().get_verbosename(schema) if with_parent:", "= super()._cmd_tree_from_ast(schema, astnode, 
context) if isinstance(astnode, cls.referenced_astnode): objcls = cls.get_schema_metaclass()", "child_ref.get_name(schema) implicit_bases = self._get_implicit_ref_bases( schema, context, child, refdict.attr, name) cmd:", "context) @classmethod def as_inherited_ref_cmd(cls, schema: s_schema.Schema, context: sd.CommandContext, astnode: qlast.ObjectDDL,", "= self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) if", "referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) if context.declarative and scls.get_is_local(schema):", "refdict.requires_explicit_overloaded and implicit_bases: assert astnode is not None astnode.declared_overloaded =", "def derive_ref( self: ReferencedT, schema: s_schema.Schema, referrer: so.QualifiedObject, *qualifiers: str,", "referrer_ctx.scls referrer_class = type(referrer) mcls = type(scls) refdict = referrer_class.get_refdict_for_class(mcls)", "astnode, context)) cmd.set_attribute_value('name', cmd.classname) return cmd @classmethod def as_inherited_ref_ast(cls, schema:", ") -> ReferencedObjectCommandMeta: cls = super().__new__(mcls, name, bases, clsdct, **kwargs)", "self.get_derived_name( schema, referrer, *qualifiers, mark_derived=mark_derived, derived_name_base=derived_name_base) else: derived_name = name", "refdict.attr, name) cmd: sd.Command if child_ref.get_is_local(schema) or implicit_bases: # Child", "context: sd.CommandContext, ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context)", "isinstance(self.astnode, (list, tuple)): return self.astnode[1] else: return self.astnode def _build_alter_cmd_stack(", "get_inherited_ref_name(cls, schema: s_schema.Schema, context: sd.CommandContext, parent: ReferencedObject, name: str )", ". import delta as sd from . 
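

# ---------------------------------------------------------------------------
# Editorial sketch -- NOT part of the original module.  A toy model of the
# naming scheme behind ReferencedObject.derive_ref() above: a derived
# reference gets a fully-qualified name that embeds its referrer (plus any
# extra qualifiers), so one short name can be derived many times without
# collision.  The mangling format below is made up for illustration; the
# real one comes from sn.get_specialized_name().

def _sketch_derived_name(
    short_name: str,
    referrer_name: str,
    *quals: str,
) -> str:
    # e.g. ('name', 'default::User') -> '__::name@default::User'
    return '__::' + '@'.join([short_name, referrer_name, *quals])

# Deriving a ref under its own current name is rejected (cf. the SchemaError
# raised in derive_ref): a derived ref must never alias its own prototype.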


class ReferencedObjectCommandMeta(sd.ObjectCommandMeta):
    _transparent_adapter_subclass: ClassVar[bool] = True
    _referrer_context_class: Optional[
        Type[sd.ObjectCommandContext[so.Object]]
    ] = None

    def __new__(mcls,
                name: str,
                bases: Tuple[type, ...],
                clsdct: Dict[str, Any],
                *,
                referrer_context_class: Optional[
                    Type[sd.ObjectCommandContext[so.Object]]
                ] = None,
                **kwargs: Any
                ) -> ReferencedObjectCommandMeta:
        cls = super().__new__(mcls, name, bases, clsdct, **kwargs)
        assert isinstance(cls, ReferencedObjectCommandMeta)
        if referrer_context_class is not None:
            cls._referrer_context_class = referrer_context_class
        return cls


class ReferencedObjectCommandBase(
    sd.QualifiedObjectCommand[ReferencedT],
    metaclass=ReferencedObjectCommandMeta,
):

    @classmethod
    def get_referrer_context_class(
        cls,
    ) -> Type[sd.ObjectCommandContext[so.Object]]:
        if cls._referrer_context_class is None:
            raise TypeError(
                f'referrer_context_class is not defined for {cls}')
        return cls._referrer_context_class

    @classmethod
    def get_referrer_context(
        cls,
        context: sd.CommandContext,
    ) -> Optional[sd.ObjectCommandContext[so.Object]]:
        """Get the context of the command for the referring object, if any.

        E.g. for a `create/alter/etc link` command this would be the
        context of the `create/alter/etc type` command.
        """
        ctxcls = cls.get_referrer_context_class()
        ctx = context.get(ctxcls)  # type: ignore
        return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx)

    @classmethod
    def get_referrer_context_or_die(
        cls,
        context: sd.CommandContext,
    ) -> sd.ObjectCommandContext[so.Object]:
        ctx = cls.get_referrer_context(context)
        if ctx is None:
            raise RuntimeError(f'no referrer context for {cls}')
        return ctx


class StronglyReferencedObjectCommand(
    ReferencedObjectCommandBase[ReferencedT]
):
    pass


class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]):

    @classmethod
    def _classname_from_ast(cls,
                            schema: s_schema.Schema,
                            astnode: qlast.NamedDDL,
                            context: sd.CommandContext
                            ) -> sn.Name:
        name = super()._classname_from_ast(schema, astnode, context)

        parent_ctx = cls.get_referrer_context(context)
        if parent_ctx is not None:
            assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand)
            referrer_name = parent_ctx.op.classname
            base_name: str

            try:
                base_ref = utils.ast_to_object(
                    astnode.name,
                    modaliases=context.modaliases,
                    schema=schema,
                )
            except errors.InvalidReferenceError:
                base_name = sn.Name(name)
            else:
                base_name = base_ref.get_name(schema)

            quals = cls._classname_quals_from_ast(
                schema, astnode, base_name, referrer_name, context)
            pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
            name = sn.Name(name=pnn, module=referrer_name.module)

        assert isinstance(name, sn.Name)
        return name

    @classmethod
    def _classname_from_name(
        cls,
        name: sn.SchemaName,
        referrer_name: sn.SchemaName,
    ) -> sn.Name:
        base_name = sn.shortname_from_fullname(name)
        quals = cls._classname_quals_from_name(name)
        pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
        return sn.Name(name=pnn, module=referrer_name.module)

    @classmethod
    def _classname_quals_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.NamedDDL,
        base_name: str,
        referrer_name: str,
        context: sd.CommandContext,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _classname_quals_from_name(
        cls,
        name: sn.SchemaName,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _name_qual_from_exprs(cls,
                              schema: s_schema.Schema,
                              exprs: Iterable[str]) -> str:
        m = hashlib.sha1()
        for expr in exprs:
            m.update(expr.encode())
        return m.hexdigest()

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        subject_ctx = self.get_referrer_context(context)
        ref_astnode: Type[qlast.DDLOperation] = getattr(
            self, 'referenced_astnode', None)
        if subject_ctx is not None and ref_astnode is not None:
            return ref_astnode
        else:
            if isinstance(self.astnode, (list, tuple)):
                return self.astnode[1]
            else:
                return self.astnode

    def _build_alter_cmd_stack(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        scls: so.Object,
        *,
        referrer: Optional[so.Object] = None
    ) -> Tuple[sd.DeltaRoot, sd.Command]:

        delta = sd.DeltaRoot()

        if referrer is None:
            assert isinstance(scls, ReferencedObject)
            referrer = scls.get_referrer(schema)

        obj = referrer
        object_stack = []

        if type(self) != type(referrer):
            object_stack.append(referrer)

        while obj is not None:
            if isinstance(obj, ReferencedObject):
                obj = obj.get_referrer(schema)
                object_stack.append(obj)
            else:
                obj = None

        cmd: sd.Command = delta
        for obj in reversed(object_stack):
            assert obj is not None
            alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, type(obj))

            alter_cmd = alter_cmd_cls(classname=obj.get_name(schema))
            cmd.add(alter_cmd)
            cmd = alter_cmd

        return delta, cmd


class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> sd.Command:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        if isinstance(astnode, cls.referenced_astnode):
            objcls = cls.get_schema_metaclass()

            referrer_ctx = cls.get_referrer_context_or_die(context)
            referrer_class = referrer_ctx.op.get_schema_metaclass()
            referrer_name = referrer_ctx.op.classname
            refdict = referrer_class.get_refdict_for_class(objcls)

            cmd.set_attribute_value(
                refdict.backref_attr,
                so.ObjectShell(
                    name=referrer_name,
                    schemaclass=referrer_class,
                ),
            )

            cmd.set_attribute_value('is_local', True)

            if getattr(astnode, 'is_abstract', None):
                cmd.set_attribute_value('is_abstract', True)

        return cmd

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        scls = self.get_object(schema, context)
        assert isinstance(scls, ReferencedInheritingObject)
        implicit_bases = scls.get_implicit_bases(schema)
        if implicit_bases and not context.declarative:
            mcls = self.get_schema_metaclass()
            Alter = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, mcls)
            alter = Alter(classname=self.classname)
            return alter._get_ast_node(schema, context)
        else:
            return super()._get_ast_node(schema, context)

    @classmethod
    def as_inherited_ref_cmd(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             astnode: qlast.ObjectDDL,
                             parents: Any) -> sd.Command:
        cmd = cls(classname=cls._classname_from_ast(schema, astnode, context))
        cmd.set_attribute_value('name', cmd.classname)
        return cmd

    @classmethod
    def as_inherited_ref_ast(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             name: str,
                             parent: ReferencedObject) -> qlast.ObjectDDL:
        nref = cls.get_inherited_ref_name(schema, context, parent, name)
        astnode_cls = cls.referenced_astnode
        astnode = astnode_cls(name=nref)
        assert isinstance(astnode, qlast.ObjectDDL)
        return astnode

    @classmethod
    def get_inherited_ref_name(cls,
                               schema: s_schema.Schema,
                               context: sd.CommandContext,
                               parent: ReferencedObject,
                               name: str
                               ) -> qlast.ObjectRef:
        # reduce name to shortname
        if sn.Name.is_qualified(name):
            shortname: str = sn.shortname_from_fullname(sn.Name(name))
        else:
            shortname = name

        nref = qlast.ObjectRef(
            name=shortname,
            module=parent.get_shortname(schema).module,
        )

        return nref

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return super()._create_innards(schema, context)
        else:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
            return super()._create_innards(schema, context)

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        referrer_cls = type(referrer)
        mcls = type(self.scls)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        schema = referrer.add_classref(schema, refdict.attr, self.scls)
        return schema


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):

    def _delete_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super()._delete_innards(schema, context)
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return schema
        else:
            referrer = referrer_ctx.scls
            schema = self._delete_ref(schema, context, referrer)
            return schema

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        return referrer.del_classref(schema, refdict.attr, refname)
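

# ---------------------------------------------------------------------------
# Editorial sketch -- NOT part of the original module.  The command classes
# above find their referrer by querying the enclosing command context, e.g.
# a nested create-property command looks up the surrounding create-type
# context.  A minimal stand-in for that stack discipline (all names here
# are hypothetical):

class _SketchContextStack:

    def __init__(self) -> None:
        self._stack: List[object] = []

    def push(self, ctx: object) -> None:
        self._stack.append(ctx)

    def pop(self) -> object:
        return self._stack.pop()

    def get(self, ctxcls: type) -> Optional[object]:
        # Mirrors context.get(ctxcls) as used by get_referrer_context():
        # the innermost enclosing context of the requested class wins.
        for ctx in reversed(self._stack):
            if isinstance(ctx, ctxcls):
                return ctx
        return None

# While a "create property" command runs, its "create type" context is on
# the stack, so get(CreateTypeContext) yields the referrer's context -- and
# it yields None for a top-level (non-referenced) definition, which is
# exactly the referrer_ctx-is-None branch taken throughout this module.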


class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_implicit_ref_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
        referrer_field: str,
        fq_name: sn.SchemaName,
    ) -> List[ReferencedInheritingObjectT]:

        child_referrer_bases = referrer.get_bases(schema).objects(schema)
        implicit_bases = []
        ref_field_type = type(referrer).get_field(referrer_field).type

        for ref_base in child_referrer_bases:
            fq_name_in_child = self._classname_from_name(
                fq_name, ref_base.get_name(schema))
            refname = ref_field_type.get_key_for_name(
                schema, fq_name_in_child)
            parent_coll = ref_base.get_field_value(schema, referrer_field)
            parent_item = parent_coll.get(schema, refname, default=None)
            if (parent_item is not None
                    and not parent_item.get_is_final(schema)):
                implicit_bases.append(parent_item)

        return implicit_bases

    def get_ref_implicit_base_delta(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refcls: ReferencedInheritingObjectT,
        implicit_bases: List[ReferencedInheritingObjectT],
    ) -> inheriting.BaseDelta_T:
        child_bases = refcls.get_bases(schema).objects(schema)

        default_base = refcls.get_default_base_name()
        explicit_bases = [
            b for b in child_bases
            if b.generic(schema) and b.get_name(schema) != default_base
        ]

        new_bases = implicit_bases + explicit_bases
        return inheriting.delta_bases(
            [b.get_name(schema) for b in child_bases],
            [b.get_name(schema) for b in new_bases],
        )

    def _validate(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext
    ) -> None:
        scls = self.scls
        implicit_bases = [
            b for b in scls.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]

        referrer_ctx = self.get_referrer_context_or_die(context)
        objcls = self.get_schema_metaclass()
        referrer_class = referrer_ctx.op.get_schema_metaclass()
        refdict = referrer_class.get_refdict_for_class(objcls)

        if context.declarative and scls.get_is_local(schema):
            if (implicit_bases
                    and refdict.requires_explicit_overloaded
                    and not self.get_attribute_value('declared_overloaded')):

                ancestry = []

                for obj in implicit_bases:
                    bref = obj.get_referrer(schema)
                    assert bref is not None
                    ancestry.append(bref)

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)} '
                    f'must be declared using the `overloaded` keyword because '
                    f'it is defined in the following ancestor(s): '
                    f'{", ".join(a.get_shortname(schema) for a in ancestry)}',
                    context=self.source_context,
                )
            elif (not implicit_bases
                    and self.get_attribute_value('declared_overloaded')):

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)}: '
                    f'cannot be declared `overloaded` as there are no '
                    f'ancestors defining it.',
                    context=self.source_context,
                )

    def _propagate_ref_op(self,
                          schema: s_schema.Schema,
                          context: sd.CommandContext,
                          scls: ReferencedInheritingObject,
                          cb: Callable[[sd.Command, str], None]
                          ) -> s_schema.Schema:

        rec = context.current().enable_recursion
        context.current().enable_recursion = False

        referrer_ctx = self.get_referrer_context_or_die(context)
        referrer = referrer_ctx.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, referrer_class)
        alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, mcls)

        for descendant in scls.ordered_descendants(schema):
            d_name = descendant.get_name(schema)
            assert isinstance(descendant, ReferencedObject)
            d_referrer = descendant.get_referrer(schema)
            assert d_referrer is not None
            d_alter_cmd = alter_cmdcls(classname=d_name)
            r_alter_cmd = r_alter_cmdcls(
                classname=d_referrer.get_name(schema))

            with r_alter_cmd.new_context(schema, context, d_referrer):
                with d_alter_cmd.new_context(schema, context, descendant):
                    cb(d_alter_cmd, refname)

                r_alter_cmd.add(d_alter_cmd)

            schema = r_alter_cmd.apply(schema, context)
            self.add(r_alter_cmd)

        context.current().enable_recursion = rec

        return schema


class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_ast(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        *,
        parent_node: Optional[qlast.DDLOperation] = None,
    ) -> Optional[qlast.DDLOperation]:
        refctx = type(self).get_referrer_context(context)
        if refctx is not None:
            if not self.get_attribute_value('is_local'):
                if context.descriptive_mode:
                    astnode = super()._get_ast(
                        schema,
                        context,
                        parent_node=parent_node,
                    )
                    assert astnode is not None

                    inherited_from = [
                        sn.quals_from_fullname(b)[0]
                        for b in self.get_implicit_bases(
                            schema,
                            context,
                            self.get_attribute_value('bases'),
                        )
                    ]

                    astnode.system_comment = (
                        f'inherited from {", ".join(inherited_from)}'
                    )

                    return astnode
                else:
                    return None
            else:
                astnode = super()._get_ast(
                    schema, context, parent_node=parent_node)

                if context.declarative:
                    scls = self.get_object(schema, context)
                    assert isinstance(scls, ReferencedInheritingObject)
                    implicit_bases = scls.get_implicit_bases(schema)
                    objcls = self.get_schema_metaclass()
                    referrer_class = refctx.op.get_schema_metaclass()
                    refdict = referrer_class.get_refdict_for_class(objcls)
                    if refdict.requires_explicit_overloaded and implicit_bases:
                        assert astnode is not None
                        astnode.declared_overloaded = True

                return astnode
        else:
            return super()._get_ast(schema, context, parent_node=parent_node)

    def _create_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)

        implicit_bases = None

        if referrer_ctx is not None and not context.canonical:
            objcls = self.get_schema_metaclass()
            referrer = referrer_ctx.scls

            if isinstance(referrer, so.InheritingObject):
                referrer_class = referrer_ctx.op.get_schema_metaclass()
                refdict = referrer_class.get_refdict_for_class(objcls)
                implicit_bases = self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self.classname)

                if implicit_bases:
                    bases = self.get_attribute_value('bases')
                    if bases:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases + [
                                b for b in bases.objects(schema)
                                if b not in implicit_bases
                            ],
                        )
                    else:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases,
                        )

                    self.set_attribute_value('bases', bases)

        schema = super()._create_begin(schema, context)

        if referrer_ctx is not None and not context.canonical:
            self._validate(schema, context)

        return schema

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        schema = super()._create_ref(schema, context, referrer)

        if (not self.scls.get_is_final(schema)
                and isinstance(referrer, so.InheritingObject)
                and not context.canonical
                and context.enable_recursion):
            # Propagate the creation of a new ref to descendants of
            # our referrer.
            schema = self._propagate_ref_creation(schema, context, referrer)

        return schema

    def _propagate_ref_creation(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
    ) -> s_schema.Schema:

        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die

        mcls = type(self.scls)
        referrer_cls = type(referrer)
        alter_cmd = get_cmd(sd.AlterObject, referrer_cls)
        ref_create_cmd = get_cmd(sd.CreateObject, mcls)
        ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
        ref_rebase_cmd = get_cmd(inheriting.RebaseInheritingObject, mcls)
        assert issubclass(ref_create_cmd, CreateReferencedInheritingObject)
        assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        parent_fq_refname = self.scls.get_name(schema)

        for child in referrer.children(schema):
            if not child.allow_ref_propagation(schema, context, refdict):
                continue

            alter = alter_cmd(classname=child.get_name(schema))
            with alter.new_context(schema, context, child):
                # This is needed to get the correct inherited name which will
                # either be created or rebased.
                ref_field_type = type(child).get_field(refdict.attr).type
                refname = ref_field_type.get_key_for_name(
                    schema, parent_fq_refname)

                astnode = ref_create_cmd.as_inherited_ref_ast(
                    schema, context, refname, self.scls)
                fq_name = self._classname_from_ast(schema, astnode, context)

                # We cannot check for ref existence in this child at this
                # time, because it might get created in a sibling branch
                # of the delta tree.  Instead, generate a command group
                # containing Alter(if_exists) and Create(if_not_exists)
                # to postpone that check until the application time.
                ref_create = ref_create_cmd.as_inherited_ref_cmd(
                    schema, context, astnode, [self.scls])
                ref_create.if_not_exists = True

                ref_create.set_attribute_value(refdict.backref_attr, child)

                if child.get_is_derived(schema):
                    # All references in a derived object must
                    # also be marked as derived, to be consistent
                    # with derive_subtype().
                    ref_create.set_attribute_value('is_derived', True)

                ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True)
                ref_alter.add(ref_rebase_cmd(
                    classname=fq_name,
                    implicit=True,
                    added_bases=(),
                    removed_bases=(),
                ))

                alter.add(ref_alter)
                alter.add(ref_create)

            self.add(alter)

        return schema

    def get_implicit_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        bases: Any,
    ) -> Sequence[str]:

        mcls = self.get_schema_metaclass()
        default_base = mcls.get_default_base_name()

        if isinstance(bases, so.ObjectCollectionShell):
            base_names = [
                b.name for b in bases.items if b.name is not None
            ]
        else:
            assert isinstance(bases, so.ObjectList)
            base_names = list(bases.names(schema))

        # Filter out explicit bases
        implicit_bases = [
            b
            for b in base_names
            if (
                b != default_base
                and isinstance(b, sn.SchemaName)
                and sn.shortname_from_fullname(b) != b
            )
        ]

        return implicit_bases


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        refctx = cls.get_referrer_context(context)
        if refctx is not None:
            cmd.set_attribute_value('is_local', True)

        assert isinstance(cmd, AlterReferencedInheritingObject)
        return cmd

    def _alter_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        scls = self.scls
        was_local = scls.get_is_local(schema)
        schema = super()._alter_begin(schema, context)
        now_local = scls.get_is_local(schema)
        if not was_local and now_local:
            self._validate(schema, context)
        return schema


class RebaseReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.RebaseInheritingObject[ReferencedInheritingObjectT],
):

    implicit = struct.Field(bool, default=False)

    def apply(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        if not context.canonical and self.implicit:
            mcls = self.get_schema_metaclass()
            refctx = self.get_referrer_context_or_die(context)
            referrer = refctx.scls
            assert isinstance(referrer, so.InheritingObject)
            refdict = type(referrer).get_refdict_for_class(mcls)

            implicit_bases = self._get_implicit_ref_bases(
                schema,
                context,
                referrer=referrer,
                referrer_field=refdict.attr,
                fq_name=self.classname,
            )

            scls = self.get_object(schema, context)
            removed_bases, added_bases = self.get_ref_implicit_base_delta(
                schema,
                context,
                scls,
                implicit_bases,
            )

            self.added_bases = added_bases
            self.removed_bases = removed_bases

        return super().apply(schema, context)
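

# ---------------------------------------------------------------------------
# Editorial sketch -- NOT part of the original module.  A runnable miniature
# of the "create or merge" decision made by _propagate_ref_creation() above:
# because a sibling branch of the delta may create the same ref first, the
# real code emits Create(if_not_exists) plus Alter(if_exists)+Rebase and
# lets application time decide.  The dict-based model below is hypothetical.

def _sketch_propagate_ref(
    child_refs: Dict[str, Dict[str, Any]],
    short_name: str,
    parent_ref: str,
) -> str:
    if short_name not in child_refs:
        # cf. the Create branch with if_not_exists=True
        child_refs[short_name] = {'bases': [parent_ref], 'is_local': False}
        return 'created'
    else:
        # cf. Alter(if_exists) + RebaseReferencedInheritingObject(implicit)
        bases = child_refs[short_name]['bases']
        if parent_ref not in bases:
            bases.insert(0, parent_ref)
        return 'rebased'

# _sketch_propagate_ref({}, 'name', 'Named.name') -> 'created'; calling it
# again with the same dict returns 'rebased' instead.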


class RenameReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    sd.RenameObject,
):

    def _rename_begin(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> s_schema.Schema:
        orig_schema = schema
        schema = super()._rename_begin(schema, context)
        scls = self.scls

        if not context.canonical and not scls.generic(schema):
            implicit_bases = scls.get_implicit_bases(schema)
            non_renamed_bases = set(implicit_bases) - context.renamed_objs

            # This object is inherited from one or more ancestors that
            # are not renamed in the same op, and this is an error.
            if non_renamed_bases:
                bases_str = ', '.join(
                    b.get_verbosename(schema, with_parent=True)
                    for b in non_renamed_bases
                )

                verb = 'are' if len(non_renamed_bases) > 1 else 'is'
                vn = scls.get_verbosename(orig_schema)

                raise errors.SchemaDefinitionError(
                    f'cannot rename inherited {vn}',
                    details=(
                        f'{vn} is inherited from '
                        f'{bases_str}, which {verb} not being renamed'
                    ),
                    context=self.source_context,
                )

            if context.enable_recursion:
                schema = self._propagate_ref_rename(schema, context, scls)

        return schema

    def _propagate_ref_rename(self,
                              schema: s_schema.Schema,
                              context: sd.CommandContext,
                              scls: ReferencedInheritingObject
                              ) -> s_schema.Schema:
        rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.RenameObject, type(scls))

        def _ref_rename(alter_cmd: sd.Command,
                        refname: str) -> None:
            astnode = rename_cmdcls.astnode(
                new_name=qlast.ObjectRef(
                    name=refname,
                ),
            )

            rename_cmd = rename_cmdcls._rename_cmd_from_ast(
                schema, astnode, context)

            alter_cmd.add(rename_cmd)

        return self._propagate_ref_op(schema, context, scls, cb=_ref_rename)


class DeleteReferencedInheritingObject(
    DeleteReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.DeleteInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:

        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)
        self_name = self.scls.get_name(schema)

        schema = referrer.del_classref(schema, refdict.attr, refname)

        if (isinstance(referrer, so.InheritingObject)
                and not context.canonical):

            if (not context.in_deletion(offset=1)
                    and not context.disable_dep_verification):
                implicit_bases = set(self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self_name))

                deleted_bases = set()
                for ctx in context.stack:
                    if isinstance(ctx.op, type(self)):
                        deleted_bases.add(ctx.op.scls)

                implicit_bases -= deleted_bases

                if implicit_bases:
                    # Cannot remove inherited objects.
                    vn = scls.get_verbosename(schema, with_parent=True)
                    parents = [
                        b.get_field_value(schema, refdict.backref_attr)
                        for b in implicit_bases
                    ]

                    pnames = '\n- '.join(
                        p.get_verbosename(schema, with_parent=True)
                        for p in parents
                    )

                    raise errors.SchemaError(
                        f'cannot drop inherited {vn}',
                        context=self.source_context,
                        details=f'{vn} is inherited from:\n- {pnames}'
                    )

            alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, referrer_class)

            for child in referrer.children(schema):
                assert isinstance(child, so.QualifiedObject)
                child_coll = child.get_field_value(schema, refdict.attr)
                fq_refname_in_child = self._classname_from_name(
                    self_name,
                    child.get_name(schema),
                )
                child_refname = reftype.get_key_for_name(
                    schema, fq_refname_in_child)
                existing = child_coll.get(schema, child_refname, None)

                if existing is not None:
                    alter = alter_cmd(classname=child.get_name(schema))
                    with alter.new_context(schema, context, child):
                        schema, cmd = self._propagate_ref_deletion(
                            schema, context, refdict, child, existing)
                        alter.add(cmd)
                    self.add(alter)

        return schema

    def _propagate_ref_deletion(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refdict: so.RefDict,
        child: so.InheritingObject,
        child_ref: ReferencedInheritingObjectT,
    ) -> Tuple[s_schema.Schema, sd.Command]:
        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die
        mcls = type(self.scls)

        name = child_ref.get_name(schema)
        implicit_bases = self._get_implicit_ref_bases(
            schema, context, child, refdict.attr, name)

        cmd: sd.Command

        if child_ref.get_is_local(schema) or implicit_bases:
            # Child is either defined locally or is inherited
            # from another parent, so we need to do a rebase.
            removed_bases, added_bases = self.get_ref_implicit_base_delta(
                schema, context, child_ref, implicit_bases)

            rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls)
            rebase_cmd = rebase_cmd_cls(
                classname=name,
                added_bases=added_bases,
                removed_bases=removed_bases,
            )

            ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
            cmd = ref_alter_cmd(classname=name)
            cmd.add(rebase_cmd)
        else:
            # The ref in child should no longer exist.
            ref_del_cmd = get_cmd(sd.DeleteObject, mcls)
            cmd = ref_del_cmd(classname=name)

        schema = cmd.apply(schema, context)

        return schema, cmd
child_coll.get(schema, child_refname, None) if existing is not None:", "group # containing Alter(if_exists) and Create(if_not_exists) # to postpone that", "= refcls.get_default_base_name() explicit_bases = [ b for b in child_bases", "return name @classmethod def _classname_from_name( cls, name: sn.SchemaName, referrer_name: sn.SchemaName,", "f'{self.scls.get_verbosename(schema, with_parent=True)} ' f'must be declared using the `overloaded` keyword", "b != default_base and isinstance(b, sn.SchemaName) and sn.shortname_from_fullname(b) != b", "scls.get_is_local(schema) if not was_local and now_local: self._validate(schema, context) return schema", "Tuple[s_schema.Schema, sd.Command]: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) name =", "context of the `create/alter/etc type` command. \"\"\" ctxcls = cls.get_referrer_context_class()", "b for b in self.get_bases(schema).objects(schema) if not b.generic(schema) ] class", "return super()._get_ast(schema, context, parent_node=parent_node) def _create_begin( self, schema: s_schema.Schema, context:", "be created or rebased. ref_field_type = type(child).get_field(refdict.attr).type refname = ref_field_type.get_key_for_name(", "assert issubclass(ref_create_cmd, CreateReferencedInheritingObject) assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject) refdict = referrer_cls.get_refdict_for_class(mcls) parent_fq_refname", "def _validate( self, schema: s_schema.Schema, context: sd.CommandContext ) -> None:", "context: sd.CommandContext, ) -> s_schema.Schema: scls = self.scls was_local =", "class RebaseReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.RebaseInheritingObject[ReferencedInheritingObjectT], ): implicit = struct.Field(bool, default=False) def", "not was_local and now_local: self._validate(schema, context) return schema class RebaseReferencedInheritingObject(", "removed_bases, added_bases = inheriting.delta_bases( [b.get_name(schema) for b in old_bases.objects(schema)], [b.get_name(schema)", "implicit_bases: # Child is either defined locally or is inherited", "sd.CommandContext, ) -> s_schema.Schema: schema = super()._delete_innards(schema, context) referrer_ctx =", "= referrer.get_bases(schema).objects(schema) implicit_bases = [] ref_field_type = type(referrer).get_field(referrer_field).type for ref_base", "return vn class ReferencedInheritingObject( so.DerivableInheritingObject, ReferencedObject, ): # Indicates that", "# you may not use this file except in compliance", "cmdcls(classname=derived_name) for k, v in derived_attrs.items(): cmd.set_attribute_value(k, v) if existing", "sd.ObjectCommandContext[so.Object]: ctx = cls.get_referrer_context(context) if ctx is None: raise RuntimeError(f'no", "name = super()._classname_from_ast(schema, astnode, context) parent_ctx = cls.get_referrer_context(context) if parent_ctx", "from . import utils ReferencedT = TypeVar('ReferencedT', bound='ReferencedObject') ReferencedInheritingObjectT =", "mcls = type(self) referrer_class = type(referrer) refdict = referrer_class.get_refdict_for_class(mcls) reftype", "else: referrer = referrer_ctx.scls schema = self._create_ref(schema, context, referrer) return", "assert bref is not None ancestry.append(bref) raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema, with_parent=True)}", "for the referring object, if any. E.g. 
for a `create/alter/etc", "declared_overloaded = so.SchemaField( bool, default=False, compcoef=None, introspectable=False, inheritable=False, ephemeral=True, )", "which will # either be created or rebased. ref_field_type =", "context, descendant): cb(d_alter_cmd, refname) r_alter_cmd.add(d_alter_cmd) schema = r_alter_cmd.apply(schema, context) self.add(r_alter_cmd)", "not None ] else: assert isinstance(bases, so.ObjectList) base_names = list(bases.names(schema))", "import name as sn from . import utils ReferencedT =", "context: sd.CommandContext, scls: so.Object, *, referrer: Optional[so.Object] = None )", "= child_coll.get(schema, child_refname, None) if existing is not None: alter", "referrer_context_class return cls class ReferencedObjectCommandBase( sd.QualifiedObjectCommand[ReferencedT], metaclass=ReferencedObjectCommandMeta, ): @classmethod def", "should no longer exist. ref_del_cmd = get_cmd(sd.DeleteObject, mcls) cmd =", "bound='ReferencedObject') ReferencedInheritingObjectT = TypeVar('ReferencedInheritingObjectT', bound='ReferencedInheritingObject') class ReferencedObject(so.DerivableObject): #: True if", "rename_cmdcls._rename_cmd_from_ast( schema, astnode, context) alter_cmd.add(rename_cmd) return self._propagate_ref_op(schema, context, scls, cb=_ref_rename)", "as s_schema from . import name as sn from .", "self.scls was_local = scls.get_is_local(schema) schema = super()._alter_begin(schema, context) now_local =", "= None cmd: sd.Command = delta for obj in reversed(object_stack):", "= refctx.scls assert isinstance(referrer, so.InheritingObject) refdict = type(referrer).get_refdict_for_class(mcls) implicit_bases =", "s_schema.Schema: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) referrer_cls = type(referrer)", "object] = {} if attrs is not None: derived_attrs.update(attrs) derived_attrs['name']", "with_parent: subject = self.get_subject(schema) if subject is not None: pn", "referrer_class.get_refdict_for_class(objcls) cmd.set_attribute_value( refdict.backref_attr, so.ObjectShell( name=referrer_name, schemaclass=referrer_class, ), ) cmd.set_attribute_value('is_local', True)", "Type[qlast.DDLOperation]: subject_ctx = self.get_referrer_context(context) ref_astnode: Type[qlast.DDLOperation] = getattr(self, 'referenced_astnode', None)", "obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0", "context: sd.CommandContext, astnode: qlast.ObjectDDL, parents: Any) -> sd.Command: cmd =", "= type(referrer) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname =", "return m.hexdigest() def _get_ast_node(self, schema: s_schema.Schema, context: sd.CommandContext ) ->", "so.QualifiedObject) child_coll = child.get_field_value(schema, refdict.attr) fq_refname_in_child = self._classname_from_name( self_name, child.get_name(schema),", "from {\", \".join(inherited_from)}' ) return astnode else: return None else:", "self.get_ref_implicit_base_delta( schema, context, scls, implicit_bases=implicit_bases, ) self.added_bases = added_bases self.removed_bases", "not None and not context.canonical: objcls = self.get_schema_metaclass() referrer =", "schema: s_schema.Schema) -> s_schema.Schema: cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.DeleteObject, type(self)) cmd", "if not b.generic(schema) ] class ReferencedObjectCommandMeta(sd.ObjectCommandMeta): _transparent_adapter_subclass: ClassVar[bool] = True", "get_implicit_bases( self, schema: s_schema.Schema, context: 
sd.CommandContext, bases: Any, ) ->", "def get_implicit_bases( self: ReferencedInheritingObjectT, schema: s_schema.Schema, ) -> List[ReferencedInheritingObjectT]: return", "context: sd.CommandContext, refdict: so.RefDict, child: so.InheritingObject, child_ref: ReferencedInheritingObjectT, ) ->", "sd.CommandContext, *, parent_node: Optional[qlast.DDLOperation] = None, ) -> Optional[qlast.DDLOperation]: refctx", "fq_name = self._classname_from_ast(schema, astnode, context) # We cannot check for", "s_schema.Schema, context: sd.CommandContext, refdict: so.RefDict, child: so.InheritingObject, child_ref: ReferencedInheritingObjectT, )", "b in implicit_bases ] pnames = '\\n- '.join( p.get_verbosename(schema, with_parent=True)", "nref = qlast.ObjectRef( name=shortname, module=parent.get_shortname(schema).module, ) return nref def _create_innards(", "-> sn.Name: base_name = sn.shortname_from_fullname(name) quals = cls._classname_quals_from_name(name) pnn =", "derived_attrs['bases'] old_bases = existing.get_bases(schema) if new_bases != old_bases: assert isinstance(new_bases,", "*, parent_node: Optional[qlast.DDLOperation] = None, ) -> Optional[qlast.DDLOperation]: refctx =", "() @classmethod def _classname_quals_from_name( cls, name: sn.SchemaName, ) -> Tuple[str,", "it might get created in a sibling branch # of", "child should no longer exist. ref_del_cmd = get_cmd(sd.DeleteObject, mcls) cmd", "under the Apache License, Version 2.0 (the \"License\"); # you", "self.get_schema_metaclass() Alter = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) alter = Alter(classname=self.classname) return", "of the command for the referring object, if any. E.g.", "implicit_bases = self._get_implicit_ref_bases( schema, context, referrer=referrer, referrer_field=refdict.attr, fq_name=self.classname, ) scls", "op.apply(schema, context) return schema def _propagate_ref_rename(self, schema: s_schema.Schema, context: sd.CommandContext,", "#: True if the object has an explicit definition and", "referrer: so.Object, ) -> s_schema.Schema: referrer_cls = type(referrer) mcls =", "context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: scls = self.scls", "need to do a rebase. removed_bases, added_bases = self.get_ref_implicit_base_delta( schema,", "b for b in scls.get_bases(schema).objects(schema) if not b.generic(schema) ] referrer_ctx", "return () @classmethod def _name_qual_from_exprs(cls, schema: s_schema.Schema, exprs: Iterable[str]) ->", "( b != default_base and isinstance(b, sn.SchemaName) and sn.shortname_from_fullname(b) !=", "# type: ignore return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx) @classmethod def get_referrer_context_or_die( cls,", "context, referrer) return super()._create_innards(schema, context) def _create_ref( self, schema: s_schema.Schema,", "so we need to do a rebase. 
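# Illustrative sketch only (not part of the recovered module): a concrete
# command class is expected to bind its referrer context through the
# `referrer_context_class` metaclass keyword accepted by
# ReferencedObjectCommandMeta above.  The `SourceCommandContext`,
# `ConcreteThing` and `CreateConcreteThing` names below are hypothetical
# placeholders:
#
#     class CreateConcreteThing(
#         CreateReferencedInheritingObject[ConcreteThing],
#         referrer_context_class=SourceCommandContext,
#     ):
#         ...
#
# With that binding in place, get_referrer_context() resolves the enclosing
# `create/alter type`-style command from the context stack, while
# get_referrer_context_or_die() raises RuntimeError when the command is
# applied outside of any referrer scope.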
class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]):

    @classmethod
    def _classname_from_ast(cls,
                            schema: s_schema.Schema,
                            astnode: qlast.NamedDDL,
                            context: sd.CommandContext
                            ) -> sn.Name:
        name = super()._classname_from_ast(schema, astnode, context)

        parent_ctx = cls.get_referrer_context(context)
        if parent_ctx is not None:
            assert isinstance(parent_ctx.op, sd.QualifiedObjectCommand)
            referrer_name = parent_ctx.op.classname
            base_name: str
            try:
                base_ref = utils.ast_to_object(
                    astnode.name,
                    modaliases=context.modaliases,
                    schema=schema,
                )
            except errors.InvalidReferenceError:
                base_name = sn.Name(name)
            else:
                base_name = base_ref.get_name(schema)

            quals = cls._classname_quals_from_ast(
                schema, astnode, base_name, referrer_name, context)
            pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
            name = sn.Name(name=pnn, module=referrer_name.module)

        assert isinstance(name, sn.Name)
        return name

    @classmethod
    def _classname_from_name(
        cls,
        name: sn.SchemaName,
        referrer_name: sn.SchemaName,
    ) -> sn.Name:
        base_name = sn.shortname_from_fullname(name)
        quals = cls._classname_quals_from_name(name)
        pnn = sn.get_specialized_name(base_name, referrer_name, *quals)
        return sn.Name(name=pnn, module=referrer_name.module)

    @classmethod
    def _classname_quals_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.NamedDDL,
        base_name: str,
        referrer_name: str,
        context: sd.CommandContext,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _classname_quals_from_name(
        cls,
        name: sn.SchemaName,
    ) -> Tuple[str, ...]:
        return ()

    @classmethod
    def _name_qual_from_exprs(cls,
                              schema: s_schema.Schema,
                              exprs: Iterable[str]) -> str:
        m = hashlib.sha1()
        for expr in exprs:
            m.update(expr.encode())
        return m.hexdigest()

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        subject_ctx = self.get_referrer_context(context)
        ref_astnode: Type[qlast.DDLOperation] = getattr(
            self, 'referenced_astnode', None)
        if subject_ctx is not None and ref_astnode is not None:
            return ref_astnode
        else:
            if isinstance(self.astnode, (list, tuple)):
                return self.astnode[1]
            else:
                return self.astnode

    def _build_alter_cmd_stack(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        scls: so.Object,
        *,
        referrer: Optional[so.Object] = None
    ) -> Tuple[sd.DeltaRoot, sd.Command]:

        delta = sd.DeltaRoot()

        if referrer is None:
            assert isinstance(scls, ReferencedObject)
            referrer = scls.get_referrer(schema)

        obj = referrer
        object_stack = []

        if type(self) != type(referrer):
            object_stack.append(referrer)

        while obj is not None:
            if isinstance(obj, ReferencedObject):
                obj = obj.get_referrer(schema)
                object_stack.append(obj)
            else:
                obj = None

        cmd: sd.Command = delta
        for obj in reversed(object_stack):
            assert obj is not None
            alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, type(obj))
            alter_cmd = alter_cmd_cls(classname=obj.get_name(schema))
            cmd.add(alter_cmd)
            cmd = alter_cmd

        return delta, cmd


class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> sd.Command:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        if isinstance(astnode, cls.referenced_astnode):
            objcls = cls.get_schema_metaclass()

            referrer_ctx = cls.get_referrer_context_or_die(context)
            referrer_class = referrer_ctx.op.get_schema_metaclass()
            referrer_name = referrer_ctx.op.classname
            refdict = referrer_class.get_refdict_for_class(objcls)

            cmd.set_attribute_value(
                refdict.backref_attr,
                so.ObjectShell(
                    name=referrer_name,
                    schemaclass=referrer_class,
                ),
            )

            cmd.set_attribute_value('is_local', True)

            if getattr(astnode, 'is_abstract', None):
                cmd.set_attribute_value('is_abstract', True)

        return cmd

    def _get_ast_node(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> Type[qlast.DDLOperation]:
        scls = self.get_object(schema, context)
        assert isinstance(scls, ReferencedInheritingObject)
        implicit_bases = scls.get_implicit_bases(schema)
        if implicit_bases and not context.declarative:
            mcls = self.get_schema_metaclass()
            Alter = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, mcls)
            alter = Alter(classname=self.classname)
            return alter._get_ast_node(schema, context)
        else:
            return super()._get_ast_node(schema, context)

    @classmethod
    def as_inherited_ref_cmd(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             astnode: qlast.ObjectDDL,
                             parents: Any) -> sd.Command:
        cmd = cls(classname=cls._classname_from_ast(schema, astnode, context))
        cmd.set_attribute_value('name', cmd.classname)
        return cmd

    @classmethod
    def as_inherited_ref_ast(cls,
                             schema: s_schema.Schema,
                             context: sd.CommandContext,
                             name: str,
                             parent: ReferencedObject) -> qlast.ObjectDDL:
        nref = cls.get_inherited_ref_name(schema, context, parent, name)
        astnode_cls = cls.referenced_astnode
        astnode = astnode_cls(name=nref)
        assert isinstance(astnode, qlast.ObjectDDL)
        return astnode

    @classmethod
    def get_inherited_ref_name(cls,
                               schema: s_schema.Schema,
                               context: sd.CommandContext,
                               parent: ReferencedObject,
                               name: str
                               ) -> qlast.ObjectRef:
        # reduce name to shortname
        if sn.Name.is_qualified(name):
            shortname: str = sn.shortname_from_fullname(sn.Name(name))
        else:
            shortname = name

        nref = qlast.ObjectRef(
            name=shortname,
            module=parent.get_shortname(schema).module,
        )

        return nref

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return super()._create_innards(schema, context)
        else:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
            return super()._create_innards(schema, context)

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        referrer_cls = type(referrer)
        mcls = type(self.scls)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        schema = referrer.add_classref(schema, refdict.attr, self.scls)
        return schema


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):

    def _delete_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super()._delete_innards(schema, context)
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is None:
            return schema
        else:
            referrer = referrer_ctx.scls
            schema = self._delete_ref(schema, context, referrer)
            return schema

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        return referrer.del_classref(schema, refdict.attr, refname)
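# Illustrative sketch only: for a reference nested two levels deep (for
# example, a hypothetical constraint on a pointer of a type),
# _build_alter_cmd_stack() walks get_referrer() upwards and yields a delta
# of nested Alter commands; the `AlterObjectType`/`AlterPointer` names here
# stand in for whatever concrete Alter classes are registered:
#
#     DeltaRoot
#     |__ AlterObjectType(classname=<type name>)
#         |__ AlterPointer(classname=<pointer name>)
#             |__ <the caller's command is added here>
#
# The returned `cmd` is the innermost Alter, which is why delete() and
# derive_ref() above do `parent_cmd.add(cmd)` before applying the delta.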
class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_implicit_ref_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
        referrer_field: str,
        fq_name: sn.SchemaName,
    ) -> List[ReferencedInheritingObjectT]:

        assert isinstance(referrer, so.QualifiedObject)
        child_referrer_bases = referrer.get_bases(schema).objects(schema)
        implicit_bases = []
        ref_field_type = type(referrer).get_field(referrer_field).type

        for ref_base in child_referrer_bases:
            fq_name_in_child = self._classname_from_name(
                fq_name, ref_base.get_name(schema))
            refname = ref_field_type.get_key_for_name(
                schema, fq_name_in_child)
            parent_coll = ref_base.get_field_value(schema, referrer_field)
            parent_item = parent_coll.get(schema, refname, default=None)
            if (parent_item is not None
                    and not parent_item.get_is_final(schema)):
                implicit_bases.append(parent_item)

        return implicit_bases

    def get_ref_implicit_base_delta(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refcls: ReferencedInheritingObjectT,
        implicit_bases: List[ReferencedInheritingObjectT],
    ) -> inheriting.BaseDelta_T:
        child_bases = refcls.get_bases(schema).objects(schema)

        default_base = refcls.get_default_base_name()
        explicit_bases = [
            b for b in child_bases
            if b.generic(schema) and b.get_name(schema) != default_base
        ]

        new_bases = implicit_bases + explicit_bases

        return inheriting.delta_bases(
            [b.get_name(schema) for b in child_bases],
            [b.get_name(schema) for b in new_bases],
        )

    def _validate(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext
    ) -> None:
        scls = self.scls
        implicit_bases = [
            b for b in scls.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]

        referrer_ctx = self.get_referrer_context_or_die(context)
        objcls = self.get_schema_metaclass()
        referrer_class = referrer_ctx.op.get_schema_metaclass()
        refdict = referrer_class.get_refdict_for_class(objcls)

        if context.declarative and scls.get_is_local(schema):
            if (implicit_bases
                    and refdict.requires_explicit_overloaded
                    and not self.get_attribute_value('declared_overloaded')):

                ancestry = []

                for obj in implicit_bases:
                    bref = obj.get_referrer(schema)
                    assert bref is not None
                    ancestry.append(bref)

                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)} '
                    f'must be declared using the `overloaded` keyword because '
                    f'it is defined in the following ancestor(s): '
                    f'{", ".join(a.get_shortname(schema) for a in ancestry)}',
                    context=self.source_context,
                )
            elif (not implicit_bases
                    and self.get_attribute_value('declared_overloaded')):
                raise errors.SchemaDefinitionError(
                    f'{self.scls.get_verbosename(schema, with_parent=True)}: '
                    f'cannot be declared `overloaded` as there are no '
                    f'ancestors defining it.',
                    context=self.source_context,
                )

    def _propagate_ref_op(self,
                          schema: s_schema.Schema,
                          context: sd.CommandContext,
                          scls: ReferencedInheritingObject,
                          cb: Callable[[sd.Command, str], None]
                          ) -> s_schema.Schema:

        rec = context.current().enable_recursion
        context.current().enable_recursion = False

        referrer_ctx = self.get_referrer_context_or_die(context)
        referrer = referrer_ctx.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)

        r_alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, referrer_class)
        alter_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.AlterObject, mcls)

        for descendant in scls.ordered_descendants(schema):
            assert isinstance(descendant, ReferencedObject)
            d_referrer = descendant.get_referrer(schema)
            assert d_referrer is not None
            d_alter_cmd = alter_cmdcls(classname=descendant.get_name(schema))
            r_alter_cmd = r_alter_cmdcls(
                classname=d_referrer.get_name(schema))

            with r_alter_cmd.new_context(schema, context, d_referrer):
                with d_alter_cmd.new_context(schema, context, descendant):
                    cb(d_alter_cmd, refname)

                r_alter_cmd.add(d_alter_cmd)

            schema = r_alter_cmd.apply(schema, context)
            self.add(r_alter_cmd)

        context.current().enable_recursion = rec

        return schema
class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_ast(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        *,
        parent_node: Optional[qlast.DDLOperation] = None,
    ) -> Optional[qlast.DDLOperation]:
        refctx = type(self).get_referrer_context(context)
        if refctx is not None:
            if not self.get_attribute_value('is_local'):
                if context.descriptive_mode:
                    astnode = super()._get_ast(
                        schema,
                        context,
                        parent_node=parent_node,
                    )
                    assert astnode is not None

                    inherited_from = [
                        sn.quals_from_fullname(b)[0]
                        for b in self.get_implicit_bases(
                            schema,
                            context,
                            self.get_attribute_value('bases'),
                        )
                    ]

                    astnode.system_comment = (
                        f'inherited from {", ".join(inherited_from)}'
                    )

                    return astnode
                else:
                    return None
            else:
                astnode = super()._get_ast(
                    schema, context, parent_node=parent_node)

                if context.declarative:
                    scls = self.get_object(schema, context)
                    assert isinstance(scls, ReferencedInheritingObject)
                    implicit_bases = scls.get_implicit_bases(schema)
                    objcls = self.get_schema_metaclass()
                    referrer_class = refctx.op.get_schema_metaclass()
                    refdict = referrer_class.get_refdict_for_class(objcls)
                    if refdict.requires_explicit_overloaded and implicit_bases:
                        assert astnode is not None
                        astnode.declared_overloaded = True

                return astnode
        else:
            return super()._get_ast(schema, context, parent_node=parent_node)

    def _create_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        implicit_bases = None

        if referrer_ctx is not None and not context.canonical:
            objcls = self.get_schema_metaclass()
            referrer = referrer_ctx.scls

            if isinstance(referrer, so.InheritingObject):
                referrer_class = referrer_ctx.op.get_schema_metaclass()
                refdict = referrer_class.get_refdict_for_class(objcls)
                implicit_bases = self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self.classname)

                if implicit_bases:
                    bases = self.get_attribute_value('bases')
                    if bases:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases + [
                                b for b in bases.objects(schema)
                                if b not in implicit_bases
                            ],
                        )
                    else:
                        bases = so.ObjectList.create(
                            schema,
                            implicit_bases,
                        )

                    self.set_attribute_value('bases', bases)

        schema = super()._create_begin(schema, context)

        if referrer_ctx is not None and not context.canonical:
            self._validate(schema, context)

        return schema

    def _create_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        schema = super()._create_ref(schema, context, referrer)

        if (isinstance(referrer, so.InheritingObject)
                and not context.canonical
                and context.enable_recursion):
            # Propagate the creation of a new ref to descendants.
            schema = self._propagate_ref_creation(schema, context, referrer)

        return schema

    def _propagate_ref_creation(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
    ) -> s_schema.Schema:

        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die

        mcls = type(self.scls)
        referrer_cls = type(referrer)
        alter_cmd = get_cmd(sd.AlterObject, referrer_cls)
        ref_create_cmd = get_cmd(sd.CreateObject, mcls)
        ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
        ref_rebase_cmd = get_cmd(inheriting.RebaseInheritingObject, mcls)
        assert issubclass(ref_create_cmd, CreateReferencedInheritingObject)
        assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject)
        refdict = referrer_cls.get_refdict_for_class(mcls)
        parent_fq_refname = self.scls.get_name(schema)

        for child in referrer.children(schema):
            if not child.allow_ref_propagation(schema, context, refdict):
                continue

            alter = alter_cmd(classname=child.get_name(schema))
            with alter.new_context(schema, context, child):
                # This is needed to get the correct inherited name which
                # will either be created or rebased.
                ref_field_type = type(child).get_field(refdict.attr).type
                refname = ref_field_type.get_key_for_name(
                    schema, parent_fq_refname)
                astnode = ref_create_cmd.as_inherited_ref_ast(
                    schema, context, refname, self.scls)
                fq_name = self._classname_from_ast(schema, astnode, context)

                # We cannot check for ref existence in this child at this
                # time, because it might get created in a sibling branch
                # of the delta tree.  Instead, generate a command group
                # containing Alter(if_exists) and Create(if_not_exists)
                # to postpone that check until the application time.
                ref_create = ref_create_cmd.as_inherited_ref_cmd(
                    schema, context, astnode, [self.scls])
                ref_create.if_not_exists = True

                ref_create.set_attribute_value(refdict.backref_attr, child)

                if child.get_is_derived(schema):
                    # All references in a derived object must
                    # also be marked as derived, to be consistent
                    # with derive_subtype().
                    ref_create.set_attribute_value('is_derived', True)

                ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True)
                ref_alter.add(ref_rebase_cmd(
                    classname=fq_name,
                    implicit=True,
                    added_bases=(),
                    removed_bases=(),
                ))

                alter.add(ref_alter)
                alter.add(ref_create)

            self.add(alter)

        return schema

    def get_implicit_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        bases: Any,
    ) -> Sequence[str]:

        mcls = self.get_schema_metaclass()
        default_base = mcls.get_default_base_name()

        if isinstance(bases, so.ObjectCollectionShell):
            base_names = [
                b.name for b in bases.items if b.name is not None
            ]
        else:
            assert isinstance(bases, so.ObjectList)
            base_names = list(bases.names(schema))

        # Filter out explicit bases
        implicit_bases = [
            b
            for b in base_names
            if (
                b != default_base
                and isinstance(b, sn.SchemaName)
                and sn.shortname_from_fullname(b) != b
            )
        ]

        return implicit_bases


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):

    @classmethod
    def _cmd_tree_from_ast(
        cls,
        schema: s_schema.Schema,
        astnode: qlast.DDLOperation,
        context: sd.CommandContext,
    ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]:
        cmd = super()._cmd_tree_from_ast(schema, astnode, context)

        refctx = cls.get_referrer_context(context)
        if refctx is not None:
            cmd.set_attribute_value('is_local', True)

        assert isinstance(cmd, AlterReferencedInheritingObject)
        return cmd

    def _alter_begin(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        scls = self.scls
        was_local = scls.get_is_local(schema)
        schema = super()._alter_begin(schema, context)
        now_local = scls.get_is_local(schema)
        if not was_local and now_local:
            self._validate(schema, context)
        return schema


class RebaseReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.RebaseInheritingObject[ReferencedInheritingObjectT],
):

    implicit = struct.Field(bool, default=False)

    def apply(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        if not context.canonical and self.implicit:
            mcls = self.get_schema_metaclass()
            refctx = self.get_referrer_context_or_die(context)
            referrer = refctx.scls
            assert isinstance(referrer, so.InheritingObject)
            refdict = type(referrer).get_refdict_for_class(mcls)

            implicit_bases = self._get_implicit_ref_bases(
                schema,
                context,
                referrer=referrer,
                referrer_field=refdict.attr,
                fq_name=self.classname,
            )

            scls = self.get_object(schema, context)
            removed_bases, added_bases = self.get_ref_implicit_base_delta(
                schema,
                context,
                scls,
                implicit_bases=implicit_bases,
            )

            self.added_bases = added_bases
            self.removed_bases = removed_bases

        return super().apply(schema, context)


class RenameReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    sd.RenameObject,
):

    def _rename_begin(self,
                      schema: s_schema.Schema,
                      context: sd.CommandContext
                      ) -> s_schema.Schema:
        orig_schema = schema
        schema = super()._rename_begin(schema, context)
        scls = self.scls

        if not context.canonical:
            implicit_bases = scls.get_implicit_bases(schema)
            non_renamed_bases = set(implicit_bases) - context.renamed_objs

            # This object is inherited from one or more ancestors that
            # are not renamed in the same op, and this is an error.
            if non_renamed_bases:
                bases_str = ', '.join(
                    b.get_verbosename(schema, with_parent=True)
                    for b in non_renamed_bases
                )

                verb = 'are' if len(non_renamed_bases) > 1 else 'is'
                vn = scls.get_verbosename(orig_schema)

                raise errors.SchemaDefinitionError(
                    f'cannot rename inherited {vn}',
                    details=(
                        f'{vn} is inherited from '
                        f'{bases_str}, which {verb} not being renamed'
                    ),
                    context=self.source_context,
                )

            if context.enable_recursion:
                schema = self._propagate_ref_rename(schema, context, scls)

        else:
            for op in self.get_subcommands(type=sd.ObjectCommand):
                schema = op.apply(schema, context)

        return schema

    def _propagate_ref_rename(self,
                              schema: s_schema.Schema,
                              context: sd.CommandContext,
                              scls: ReferencedInheritingObject
                              ) -> s_schema.Schema:
        rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(
            sd.RenameObject, type(scls))

        def _ref_rename(alter_cmd: sd.Command,
                        refname: str) -> None:
            astnode = rename_cmdcls.astnode(
                new_name=qlast.ObjectRef(
                    name=refname,
                ),
            )

            rename_cmd = rename_cmdcls._rename_cmd_from_ast(
                schema, astnode, context)

            alter_cmd.add(rename_cmd)

        return self._propagate_ref_op(schema, context, scls, cb=_ref_rename)


class DeleteReferencedInheritingObject(
    DeleteReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.DeleteInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)
        self_name = self.scls.get_name(schema)

        schema = referrer.del_classref(schema, refdict.attr, refname)

        if (isinstance(referrer, so.InheritingObject)
                and not context.canonical):

            if (not context.in_deletion(offset=1)
                    and not context.disable_dep_verification):
                implicit_bases = set(self._get_implicit_ref_bases(
                    schema, context, referrer, refdict.attr, self_name))

                deleted_bases = set()
                for ctx in context.stack:
                    if isinstance(ctx.op, type(self)):
                        deleted_bases.add(ctx.op.scls)

                implicit_bases -= deleted_bases

                if implicit_bases:
                    # Cannot remove inherited objects.
                    vn = scls.get_verbosename(schema, with_parent=True)
                    parents = [
                        b.get_field_value(schema, refdict.backref_attr)
                        for b in implicit_bases
                    ]
                    pnames = '\n- '.join(
                        p.get_verbosename(schema, with_parent=True)
                        for p in parents
                    )
                    raise errors.SchemaError(
                        f'cannot drop inherited {vn}',
                        context=self.source_context,
                        details=f'{vn} is inherited from:\n- {pnames}'
                    )

            alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die(
                sd.AlterObject, referrer_class)

            for child in referrer.children(schema):
                assert isinstance(child, so.QualifiedObject)
                child_coll = child.get_field_value(schema, refdict.attr)
                fq_refname_in_child = self._classname_from_name(
                    self_name,
                    child.get_name(schema),
                )
                child_refname = reftype.get_key_for_name(
                    schema, fq_refname_in_child)
                existing = child_coll.get(schema, child_refname, None)

                if existing is not None:
                    alter = alter_cmd(classname=child.get_name(schema))
                    with alter.new_context(schema, context, child):
                        schema, cmd = self._propagate_ref_deletion(
                            schema, context, refdict, child, existing)
                        alter.add(cmd)
                    self.add(alter)

        return schema

    def _propagate_ref_deletion(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        refdict: so.RefDict,
        child: so.InheritingObject,
        child_ref: ReferencedInheritingObjectT,
    ) -> Tuple[s_schema.Schema, sd.Command]:
        get_cmd = sd.ObjectCommandMeta.get_command_class_or_die
        mcls = type(self.scls)

        name = child_ref.get_name(schema)
        implicit_bases = self._get_implicit_ref_bases(
            schema, context, child, refdict.attr, name)

        cmd: sd.Command

        if child_ref.get_is_local(schema) or implicit_bases:
            # Child is either defined locally or is inherited
            # from another parent, so we need to do a rebase.
            removed_bases, added_bases = self.get_ref_implicit_base_delta(
                schema, context, child_ref, implicit_bases)

            rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls)
            rebase_cmd = rebase_cmd_cls(
                classname=name,
                added_bases=added_bases,
                removed_bases=removed_bases,
            )

            ref_alter_cmd = get_cmd(sd.AlterObject, mcls)
            cmd = ref_alter_cmd(classname=name)
            cmd.add(rebase_cmd)
        else:
            # The ref in child should no longer exist.
            ref_del_cmd = get_cmd(sd.DeleteObject, mcls)
            cmd = ref_del_cmd(classname=name)

        schema = cmd.apply(schema, context)

        return schema, cmd
import delta as", ") -> s_schema.Schema: rename_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.RenameObject, type(scls)) def _ref_rename(alter_cmd:", "in child_bases if b.generic(schema) and b.get_name(schema) != default_base ] new_bases", "and not context.canonical: objcls = self.get_schema_metaclass() referrer = referrer_ctx.scls if", "self.get_subject(schema) def delete(self, schema: s_schema.Schema) -> s_schema.Schema: cmdcls = sd.ObjectCommandMeta.get_command_class_or_die(", "implicit=True, added_bases=(), removed_bases=(), )) alter.add(ref_alter) alter.add(ref_create) self.add(alter) return schema def", "mcls) cmd = ref_del_cmd(classname=name) schema = cmd.apply(schema, context) return schema,", "context, child_ref, implicit_bases) rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls) rebase_cmd = rebase_cmd_cls(", "qlast.ObjectRef: # reduce name to shortname if sn.Name.is_qualified(name): shortname: str", "b.generic(schema) ] referrer_ctx = self.get_referrer_context_or_die(context) objcls = self.get_schema_metaclass() referrer_class =", "context, parent_node=parent_node) if context.declarative: scls = self.get_object(schema, context) assert isinstance(scls,", "b.get_verbosename(schema, with_parent=True) for b in non_renamed_bases ) verb = 'are'", "sd.ObjectCommand) delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context, self, referrer=referrer) with", "so.SchemaField( bool, default=False, inheritable=False, compcoef=0.909, reflection_method=so.ReflectionMethod.AS_LINK, ) def get_subject(self, schema:", "name: Optional[str] = None, **kwargs: Any, ) -> Tuple[s_schema.Schema, ReferencedT]:", "sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, mcls) for descendant in scls.ordered_descendants(schema): d_name = descendant.get_name(schema)", "str, mark_derived: bool = False, attrs: Optional[Dict[str, Any]] = None,", "_get_ast( self, schema: s_schema.Schema, context: sd.CommandContext, *, parent_node: Optional[qlast.DDLOperation] =", "= referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for_name(schema, derived_name) refcoll", "): referenced_astnode: ClassVar[Type[qlast.ObjectDDL]] @classmethod def _cmd_tree_from_ast( cls, schema: s_schema.Schema, astnode:", "self.astnode def _build_alter_cmd_stack( self, schema: s_schema.Schema, context: sd.CommandContext, scls: so.Object,", "with context(sd.DeltaRootContext(schema=schema, op=delta)): schema = delta.apply(schema, context) return schema def", "if ctx is None: raise RuntimeError(f'no referrer context for {cls}')", "context.renamed_objs # This object is inherited from one or more", "from edb import errors from edb.common import struct from edb.edgeql", "schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: scls", "that # are not renamed in the same op, and", "ReferencedInheritingObjectCommand( ReferencedObjectCommand[ReferencedInheritingObjectT], inheriting.InheritingObjectCommand[ReferencedInheritingObjectT], ): def _get_implicit_ref_bases( self, schema: s_schema.Schema, context:", "schema else: referrer = referrer_ctx.scls schema = self._delete_ref(schema, context, referrer)", "refctx.scls assert isinstance(referrer, so.InheritingObject) refdict = type(referrer).get_refdict_for_class(mcls) implicit_bases = self._get_implicit_ref_bases(", "fq_name_in_child) parent_coll = ref_base.get_field_value(schema, referrer_field) parent_item = parent_coll.get(schema, refname, default=None)", 
"True if the object has an explicit definition and is", "CreateReferencedInheritingObject) assert issubclass(ref_rebase_cmd, RebaseReferencedInheritingObject) refdict = referrer_cls.get_refdict_for_class(mcls) parent_fq_refname = self.scls.get_name(schema)", "pnames = '\\n- '.join( p.get_verbosename(schema, with_parent=True) for p in parents", "[ b.get_field_value(schema, refdict.backref_attr) for b in implicit_bases ] pnames =", "delta, parent_cmd = cmd._build_alter_cmd_stack( schema, context, self) parent_cmd.add(cmd) with context(sd.DeltaRootContext(schema=schema,", "ReferencedObject) referrer = scls.get_referrer(schema) obj = referrer object_stack = []", "[ b for b in scls.get_bases(schema).objects(schema) if not b.generic(schema) ]", "object must # also be marked as derived, to be", "context: sd.CommandContext, ) -> sd.ObjectCommandContext[so.Object]: ctx = cls.get_referrer_context(context) if ctx", "rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls) rebase_cmd = rebase_cmd_cls( classname=name, added_bases=added_bases, removed_bases=removed_bases,", "super().get_verbosename(schema) if with_parent: subject = self.get_subject(schema) if subject is not", "scls: ReferencedInheritingObject, cb: Callable[[sd.Command, str], None] ) -> s_schema.Schema: rec", "from . import delta as sd from . import inheriting", "context: sd.CommandContext, ) -> s_schema.Schema: referrer_ctx = self.get_referrer_context(context) if referrer_ctx", "base_name = sn.Name(name) else: base_name = base_ref.get_name(schema) quals = cls._classname_quals_from_ast(", "refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls)", "= False if refdict_whitelist is not None: context.current().inheritance_refdicts = refdict_whitelist", "-> Type[sd.ObjectCommandContext[so.Object]]: if cls._referrer_context_class is None: raise TypeError( f'referrer_context_class is", "not None d_alter_cmd = alter_cmdcls(classname=d_name) r_alter_cmd = r_alter_cmdcls( classname=d_referrer.get_name(schema)) with", "AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context) refctx = cls.get_referrer_context(context) if", "if implicit_bases: bases = self.get_attribute_value('bases') if bases: bases = so.ObjectList.create(", "referrer_class = type(referrer) refdict = referrer_class.get_refdict_for_class(mcls) reftype = referrer_class.get_field(refdict.attr).type refname", "class ReferencedInheritingObject( so.DerivableInheritingObject, ReferencedObject, ): # Indicates that the object", "in implicit_bases ], ) else: bases = so.ObjectList.create( schema, implicit_bases,", "s_schema.Schema, context: sd.CommandContext, *, parent_node: Optional[qlast.DDLOperation] = None, ) ->", "is not None: cmdcls: Type[sd.Command] = \\ sd.ObjectCommandMeta.get_command_class_or_die(sd.AlterObject, type(self)) else:", "obj in implicit_bases: bref = obj.get_referrer(schema) assert bref is not", "type(self) != type(referrer): object_stack.append(referrer) while obj is not None: if", "scls = self.get_object(schema, context) assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema)", "= [] for obj in implicit_bases: bref = obj.get_referrer(schema) assert", "reduce name to shortname if sn.Name.is_qualified(name): shortname: str = sn.shortname_from_fullname(sn.Name(name))", "ref_field_type = type(referrer).get_field(referrer_field).type for ref_base in 
child_referrer_bases: fq_name_in_child = self._classname_from_name(", "if isinstance(referrer, so.InheritingObject): referrer_class = referrer_ctx.op.get_schema_metaclass() refdict = referrer_class.get_refdict_for_class(objcls) implicit_bases", "and self.get_attribute_value('declared_overloaded')): raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema, with_parent=True)}: ' f'cannot be declared", "schema, astnode, context) alter_cmd.add(rename_cmd) return self._propagate_ref_op(schema, context, scls, cb=_ref_rename) class", "# explicitly inherited. declared_overloaded = so.SchemaField( bool, default=False, compcoef=None, introspectable=False,", "and refdict.requires_explicit_overloaded and not self.get_attribute_value('declared_overloaded')): ancestry = [] for obj", "= scls.get_implicit_bases(schema) objcls = self.get_schema_metaclass() referrer_class = refctx.op.get_schema_metaclass() refdict =", "set(self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self_name)) deleted_bases = set() for", "context) alter_cmd.add(rename_cmd) return self._propagate_ref_op(schema, context, scls, cb=_ref_rename) class DeleteReferencedInheritingObject( DeleteReferencedObjectCommand[ReferencedInheritingObjectT],", "Optional[str] = None, inheritance_merge: bool = True, preserve_path_id: Optional[bool] =", "context.disable_dep_verification): implicit_bases = set(self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self_name)) deleted_bases", "refdict, child, existing) alter.add(cmd) self.add(alter) return schema def _propagate_ref_deletion( self,", "assert isinstance(scls, ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) if implicit_bases and not", "schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: schema", "delta for obj in reversed(object_stack): assert obj is not None", "schema = super()._create_ref(schema, context, referrer) if (not self.scls.get_is_final(schema) and isinstance(referrer,", "alter_cmd(classname=child.get_name(schema)) with alter.new_context(schema, context, child): schema, cmd = self._propagate_ref_deletion( schema,", "self.get_schema_metaclass() referrer = referrer_ctx.scls if isinstance(referrer, so.InheritingObject): referrer_class = referrer_ctx.op.get_schema_metaclass()", "referrer object_stack = [] if type(self) != type(referrer): object_stack.append(referrer) while", "is not None: if not self.get_attribute_value('is_local'): if context.descriptive_mode: astnode =", "context) if referrer_ctx is not None and not context.canonical: self._validate(schema,", "context: sd.CommandContext, referrer: so.Object, ) -> s_schema.Schema: referrer_cls = type(referrer)", "schema def _propagate_ref_deletion( self, schema: s_schema.Schema, context: sd.CommandContext, refdict: so.RefDict,", "mcls) rebase_cmd = rebase_cmd_cls( classname=name, added_bases=added_bases, removed_bases=removed_bases, ) ref_alter_cmd =", "= self.get_ref_implicit_base_delta( schema, context, child_ref, implicit_bases) rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls)", "= super()._delete_innards(schema, context) referrer_ctx = self.get_referrer_context(context) if referrer_ctx is None:", "schema: s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject, cb: Callable[[sd.Command, str], None]", "ReferencedT = schema.get(derived_name) return schema, derived def get_verbosename( self, schema:", "is defined in the following ancestor(s): ' f'{\", 
\".join(a.get_shortname(schema) for", "qlast.NamedDDL, base_name: str, referrer_name: str, context: sd.CommandContext, ) -> Tuple[str,", "sn.shortname_from_fullname(name) quals = cls._classname_quals_from_name(name) pnn = sn.get_specialized_name(base_name, referrer_name, *quals) return", "a sibling branch # of the delta tree. Instead, generate", "referrer_class.get_field(refdict.attr).type refname = reftype.get_key_for(schema, self.scls) self_name = self.scls.get_name(schema) schema =", "child in referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll = child.get_field_value(schema, refdict.attr)", "[ sn.quals_from_fullname(b)[0] for b in self.get_implicit_bases( schema, context, self.get_attribute_value('bases'), )", "super()._classname_from_ast(schema, astnode, context) parent_ctx = cls.get_referrer_context(context) if parent_ctx is not", "implicit_bases) rebase_cmd_cls = get_cmd(inheriting.RebaseInheritingObject, mcls) rebase_cmd = rebase_cmd_cls( classname=name, added_bases=added_bases,", "referrer=referrer) with context(sd.DeltaRootContext(schema=schema, op=delta)): if not inheritance_merge: context.current().inheritance_merge = False", "not context.canonical): if (not context.in_deletion(offset=1) and not context.disable_dep_verification): implicit_bases =", "type(self.scls) refdict = referrer_cls.get_refdict_for_class(mcls) schema = referrer.add_classref(schema, refdict.attr, self.scls) return", "base_name, referrer_name, context) pnn = sn.get_specialized_name(base_name, referrer_name, *quals) name =", "b in new_bases.objects(schema)], ) rebase_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( inheriting.RebaseInheritingObject, type(self)) rebase_cmd", "schema, context, refdict, child, existing) alter.add(cmd) self.add(alter) return schema def", "sd.AlterObject, type(obj)) alter_cmd = alter_cmd_cls(classname=obj.get_name(schema)) cmd.add(alter_cmd) cmd = alter_cmd return", "if isinstance(astnode, cls.referenced_astnode): objcls = cls.get_schema_metaclass() referrer_ctx = cls.get_referrer_context_or_die(context) referrer_class", "Tuple[s_schema.Schema, ReferencedT]: if name is None: derived_name: str = self.get_derived_name(", "is None: return schema else: referrer = referrer_ctx.scls schema =", "= delta.apply(schema, context) return schema def derive_ref( self: ReferencedT, schema:", "attrs: Optional[Dict[str, Any]] = None, dctx: Optional[sd.CommandContext] = None, derived_name_base:", "inheriting.RebaseInheritingObject, type(self)) rebase_cmd = rebase_cmdcls( classname=derived_name, added_bases=added_bases, removed_bases=removed_bases, ) cmd.add(rebase_cmd)", ") def _validate( self, schema: s_schema.Schema, context: sd.CommandContext ) ->", "refdict.attr, self_name)) deleted_bases = set() for ctx in context.stack: if", "sd.Command]: delta = sd.DeltaRoot() if referrer is None: assert isinstance(scls,", "qlast.ObjectRef( name=shortname, module=parent.get_shortname(schema).module, ) return nref def _create_innards( self, schema:", "in new_bases.objects(schema)], ) rebase_cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( inheriting.RebaseInheritingObject, type(self)) rebase_cmd =", "astnode is not None inherited_from = [ sn.quals_from_fullname(b)[0] for b", "type(self)) rebase_cmd = rebase_cmdcls( classname=derived_name, added_bases=added_bases, removed_bases=removed_bases, ) cmd.add(rebase_cmd) context", "*quals) return sn.Name(name=pnn, module=referrer_name.module) @classmethod def _classname_quals_from_ast( cls, schema: s_schema.Schema,", 
"list(bases.names(schema)) # Filter out explicit bases implicit_bases = [ b", "astnode is not None astnode.declared_overloaded = True return astnode else:", "# from __future__ import annotations from typing import * import", "sn.Name(name=pnn, module=referrer_name.module) assert isinstance(name, sn.Name) return name @classmethod def _classname_from_name(", "sn.Name(name=pnn, module=referrer_name.module) @classmethod def _classname_quals_from_ast( cls, schema: s_schema.Schema, astnode: qlast.NamedDDL,", "added_bases self.removed_bases = removed_bases return super().apply(schema, context) class RenameReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],", "base_name: str try: base_ref = utils.ast_to_object( astnode.name, modaliases=context.modaliases, schema=schema, )", "sd.AlterObject, referrer_class) for child in referrer.children(schema): assert isinstance(child, so.QualifiedObject) child_coll", "not None: derived_attrs.update(attrs) derived_attrs['name'] = derived_name derived_attrs['bases'] = so.ObjectList.create( schema,", "class ReferencedObject(so.DerivableObject): #: True if the object has an explicit", "scls.get_implicit_bases(schema) non_renamed_bases = set(implicit_bases) - context.renamed_objs # This object is", "*, referrer: Optional[so.Object] = None ) -> Tuple[sd.DeltaRoot, sd.Command]: delta", "if transient: context.current().transient_derivation = True if preserve_path_id: context.current().preserve_path_id = True", "removed_bases=removed_bases, ) cmd.add(rebase_cmd) context = sd.CommandContext( modaliases={}, schema=schema, ) assert", "name: sn.SchemaName, ) -> Tuple[str, ...]: return () @classmethod def", "_delete_innards( self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: schema", "sd.CommandContext( modaliases={}, schema=schema, disable_dep_verification=True, ) delta, parent_cmd = cmd._build_alter_cmd_stack( schema,", "ref_alter = ref_alter_cmd(classname=fq_name, if_exists=True) ref_alter.add(ref_rebase_cmd( classname=fq_name, implicit=True, added_bases=(), removed_bases=(), ))", "' f'it is defined in the following ancestor(s): ' f'{\",", "assert isinstance(referrer, so.QualifiedObject) child_referrer_bases = referrer.get_bases(schema).objects(schema) implicit_bases = [] ref_field_type", "inheritance_merge: bool = True, preserve_path_id: Optional[bool] = None, refdict_whitelist: Optional[AbstractSet[str]]", "base_ref.get_name(schema) quals = cls._classname_quals_from_ast( schema, astnode, base_name, referrer_name, context) pnn", "in derived_attrs.items(): cmd.set_attribute_value(k, v) if existing is not None: new_bases", "time. 
ref_create = ref_create_cmd.as_inherited_ref_cmd( schema, context, astnode, [self.scls]) ref_create.if_not_exists =", "try: base_ref = utils.ast_to_object( astnode.name, modaliases=context.modaliases, schema=schema, ) except errors.InvalidReferenceError:", "if b.generic(schema) and b.get_name(schema) != default_base ] new_bases = implicit_bases", "and isinstance(referrer, so.InheritingObject) and not context.canonical and context.enable_recursion): # Propagate", "check for ref existence in this child at this #", "schema = referrer.del_classref(schema, refdict.attr, refname) if (isinstance(referrer, so.InheritingObject) and not", "= self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self.classname) if implicit_bases: bases", "'\\n- '.join( p.get_verbosename(schema, with_parent=True) for p in parents ) raise", "context = sd.CommandContext( modaliases={}, schema=schema, disable_dep_verification=True, ) delta, parent_cmd =", "file is part of the EdgeDB open source project. #", "cls(classname=cls._classname_from_ast(schema, astnode, context)) cmd.set_attribute_value('name', cmd.classname) return cmd @classmethod def as_inherited_ref_ast(cls,", "v) if existing is not None: new_bases = derived_attrs['bases'] old_bases", "@classmethod def get_referrer_context( cls, context: sd.CommandContext, ) -> Optional[sd.ObjectCommandContext[so.Object]]: \"\"\"Get", "the command for the referring object, if any. E.g. for", "annotations from typing import * import hashlib from edb import", "def _get_ast_node(self, schema: s_schema.Schema, context: sd.CommandContext ) -> Type[qlast.DDLOperation]: scls", ") self.added_bases = added_bases self.removed_bases = removed_bases return super().apply(schema, context)", "context: sd.CommandContext ) -> s_schema.Schema: orig_schema = schema schema =", "sd.Command, refname: str) -> None: astnode = rename_cmdcls.astnode( new_name=qlast.ObjectRef( name=refname,", "super()._alter_begin(schema, context) now_local = scls.get_is_local(schema) if not was_local and now_local:", "isinstance(new_bases, so.ObjectList) removed_bases, added_bases = inheriting.delta_bases( [b.get_name(schema) for b in", "bases_str = ', '.join( b.get_verbosename(schema, with_parent=True) for b in non_renamed_bases", "mcls.get_default_base_name() if isinstance(bases, so.ObjectCollectionShell): base_names = [ b.name for b", "declared as # explicitly inherited. declared_overloaded = so.SchemaField( bool, default=False,", "obj is not None alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, type(obj)) alter_cmd", "base_name = base_ref.get_name(schema) quals = cls._classname_quals_from_ast( schema, astnode, base_name, referrer_name,", "existing = refcoll.get(schema, refname, default=None) if existing is not None:", "= super()._alter_begin(schema, context) now_local = scls.get_is_local(schema) if not was_local and", "context.current().transient_derivation = True if preserve_path_id: context.current().preserve_path_id = True parent_cmd.add(cmd) schema", "d_alter_cmd.new_context(schema, context, descendant): cb(d_alter_cmd, refname) r_alter_cmd.add(d_alter_cmd) schema = r_alter_cmd.apply(schema, context)", "= implicit_bases + explicit_bases return inheriting.delta_bases( [b.get_name(schema) for b in", "part of the EdgeDB open source project. 
# # Copyright", "get_referrer_context_or_die( cls, context: sd.CommandContext, ) -> sd.ObjectCommandContext[so.Object]: ctx = cls.get_referrer_context(context)", "return super().apply(schema, context) class RenameReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], sd.RenameObject, ): def _rename_begin(self,", "ref_field_type.get_key_for_name( schema, parent_fq_refname) astnode = ref_create_cmd.as_inherited_ref_ast( schema, context, refname, self.scls)", "= super()._rename_begin(schema, context) scls = self.scls if not context.canonical and", "License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by", "parent_coll.get(schema, refname, default=None) if (parent_item is not None and not", "sd.Command]: get_cmd = sd.ObjectCommandMeta.get_command_class_or_die mcls = type(self.scls) name = child_ref.get_name(schema)", "s_schema.Schema) -> s_schema.Schema: cmdcls = sd.ObjectCommandMeta.get_command_class_or_die( sd.DeleteObject, type(self)) cmd =", "in the following ancestor(s): ' f'{\", \".join(a.get_shortname(schema) for a in", "= reftype.get_key_for(schema, self.scls) return referrer.del_classref(schema, refdict.attr, refname) class ReferencedInheritingObjectCommand( ReferencedObjectCommand[ReferencedInheritingObjectT],", "in non_renamed_bases ) verb = 'are' if len(non_renamed_bases) > 1", "import * import hashlib from edb import errors from edb.common", "rename_cmdcls.astnode( new_name=qlast.ObjectRef( name=refname, ), ) rename_cmd = rename_cmdcls._rename_cmd_from_ast( schema, astnode,", "name: str, parent: ReferencedObject) -> qlast.ObjectDDL: nref = cls.get_inherited_ref_name(schema, context,", ") ] astnode.system_comment = ( f'inherited from {\", \".join(inherited_from)}' )", "concrete link` command this would be the context of the", "for b in child_bases], [b.get_name(schema) for b in new_bases], )", "subject_ctx is not None and ref_astnode is not None: return", "Optional[bool] = None, refdict_whitelist: Optional[AbstractSet[str]] = None, transient: bool =", "b.get_name(schema) != default_base ] new_bases = implicit_bases + explicit_bases return", "following ancestor(s): ' f'{\", \".join(a.get_shortname(schema) for a in ancestry)}', context=self.source_context,", "context) refctx = cls.get_referrer_context(context) if refctx is not None: cmd.set_attribute_value('is_local',", "sn.SchemaName, referrer_name: sn.SchemaName, ) -> sn.Name: base_name = sn.shortname_from_fullname(name) quals", "details=f'{vn} is inherited from:\\n- {pnames}' ) alter_cmd = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject,", "s_schema.Schema, context: sd.CommandContext ) -> None: scls = self.scls implicit_bases", "cmd = ref_alter_cmd(classname=name) cmd.add(rebase_cmd) else: # The ref in child", "DeleteReferencedObjectCommand( ReferencedObjectCommand[ReferencedT], sd.DeleteObject[ReferencedT], ): def _delete_innards( self, schema: s_schema.Schema, context:", ") return nref def _create_innards( self, schema: s_schema.Schema, context: sd.CommandContext,", "Optional[so.Object]: # NB: classes that inherit ReferencedObject define a `get_subject`", "= super()._classname_from_ast(schema, astnode, context) parent_ctx = cls.get_referrer_context(context) if parent_ctx is", "alter.add(ref_alter) alter.add(ref_create) self.add(alter) return schema def get_implicit_bases( self, schema: s_schema.Schema,", "schema schema = super()._rename_begin(schema, context) scls = self.scls if not", "p.get_verbosename(schema, with_parent=True) for p in parents ) raise 
errors.SchemaError( f'cannot", "referrer_name, *quals) return sn.Name(name=pnn, module=referrer_name.module) @classmethod def _classname_quals_from_ast( cls, schema:", "`overloaded` keyword because ' f'it is defined in the following", "isinstance(obj, ReferencedObject): obj = obj.get_referrer(schema) object_stack.append(obj) else: obj = None", "self, schema: s_schema.Schema, context: sd.CommandContext, ) -> s_schema.Schema: scls =", ") -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context) refctx =", "and now_local: self._validate(schema, context) return schema class RebaseReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], inheriting.RebaseInheritingObject[ReferencedInheritingObjectT],", "def _get_implicit_ref_bases( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, referrer_field:", "None, derived_name_base: Optional[str] = None, inheritance_merge: bool = True, preserve_path_id:", "context) if isinstance(astnode, cls.referenced_astnode): objcls = cls.get_schema_metaclass() referrer_ctx = cls.get_referrer_context_or_die(context)", "None: cls._referrer_context_class = referrer_context_class return cls class ReferencedObjectCommandBase( sd.QualifiedObjectCommand[ReferencedT], metaclass=ReferencedObjectCommandMeta,", "parent_ctx = cls.get_referrer_context(context) if parent_ctx is not None: assert isinstance(parent_ctx.op,", "was_local = scls.get_is_local(schema) schema = super()._alter_begin(schema, context) now_local = scls.get_is_local(schema)", "= get_cmd(sd.AlterObject, referrer_cls) ref_create_cmd = get_cmd(sd.CreateObject, mcls) ref_alter_cmd = get_cmd(sd.AlterObject,", "referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None, **kwargs: Any ) ->", "if not context.canonical and self.implicit: mcls = self.get_schema_metaclass() refctx =", "schema def _create_ref( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.Object,", "= existing.get_bases(schema) if new_bases != old_bases: assert isinstance(new_bases, so.ObjectList) removed_bases,", "= True return astnode else: return super()._get_ast(schema, context, parent_node=parent_node) def", "= ref_create_cmd.as_inherited_ref_ast( schema, context, refname, self.scls) fq_name = self._classname_from_ast(schema, astnode,", "context = sd.CommandContext( modaliases={}, schema=schema, ) assert isinstance(cmd, sd.ObjectCommand) delta,", "child): schema, cmd = self._propagate_ref_deletion( schema, context, refdict, child, existing)", "context: sd.CommandContext, ) -> sd.Command: cmd = super()._cmd_tree_from_ast(schema, astnode, context)", "Version 2.0 (the \"License\"); # you may not use this", "parent, so we need to do a rebase. 
removed_bases, added_bases", "b not in implicit_bases ], ) else: bases = so.ObjectList.create(", "is None: derived_name: str = self.get_derived_name( schema, referrer, *qualifiers, mark_derived=mark_derived,", "= self._classname_from_name( fq_name, ref_base.get_name(schema)) refname = ref_field_type.get_key_for_name(schema, fq_name_in_child) parent_coll =", "\".join(a.get_shortname(schema) for a in ancestry)}', context=self.source_context, ) elif (not implicit_bases", "= get_cmd(sd.DeleteObject, mcls) cmd = ref_del_cmd(classname=name) schema = cmd.apply(schema, context)", "is not None astnode.declared_overloaded = True return astnode else: return", "context.canonical and self.implicit: mcls = self.get_schema_metaclass() refctx = self.get_referrer_context_or_die(context) referrer", "and not self.get_attribute_value('declared_overloaded')): ancestry = [] for obj in implicit_bases:", "self: ReferencedInheritingObjectT, schema: s_schema.Schema, ) -> List[ReferencedInheritingObjectT]: return [ b", "derived_name_base: Optional[str] = None, inheritance_merge: bool = True, preserve_path_id: Optional[bool]", "[] for obj in implicit_bases: bref = obj.get_referrer(schema) assert bref", "assert astnode is not None inherited_from = [ sn.quals_from_fullname(b)[0] for", "type(self.scls) referrer_cls = type(referrer) alter_cmd = get_cmd(sd.AlterObject, referrer_cls) ref_create_cmd =", "referrer_class = type(referrer) mcls = type(scls) refdict = referrer_class.get_refdict_for_class(mcls) reftype", "either defined locally or is inherited # from another parent,", "s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, referrer_field: str, fq_name: sn.SchemaName, )", "= self.get_referrer_context_or_die(context) referrer = refctx.scls assert isinstance(referrer, so.InheritingObject) refdict =", "alter_cmd = get_cmd(sd.AlterObject, referrer_cls) ref_create_cmd = get_cmd(sd.CreateObject, mcls) ref_alter_cmd =", "fq_name=self.classname, ) scls = self.get_object(schema, context) removed_bases, added_bases = self.get_ref_implicit_base_delta(", "ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) objcls = self.get_schema_metaclass() referrer_class = refctx.op.get_schema_metaclass()", "by applicable law or agreed to in writing, software #", "inherited {vn}', context=self.source_context, details=f'{vn} is inherited from:\\n- {pnames}' ) alter_cmd", "self.removed_bases = removed_bases return super().apply(schema, context) class RenameReferencedInheritingObject( ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], sd.RenameObject,", "= self.scls referrer_class = type(referrer) mcls = type(scls) refdict =", "{cls}') return cls._referrer_context_class @classmethod def get_referrer_context( cls, context: sd.CommandContext, )", "assert isinstance(astnode, qlast.ObjectDDL) return astnode @classmethod def get_inherited_ref_name(cls, schema: s_schema.Schema,", "so.InheritingObject) and not context.canonical): if (not context.in_deletion(offset=1) and not context.disable_dep_verification):", "obj.get_referrer(schema) assert bref is not None ancestry.append(bref) raise errors.SchemaDefinitionError( f'{self.scls.get_verbosename(schema,", "assert isinstance(cmd, AlterReferencedInheritingObject) return cmd def _alter_begin( self, schema: s_schema.Schema,", "default=False) def apply( self, schema: s_schema.Schema, context: sd.CommandContext, ) ->", "non_renamed_bases = set(implicit_bases) - context.renamed_objs # This object is inherited", "cb: Callable[[sd.Command, str], 
None] ) -> s_schema.Schema: rec = context.current().enable_recursion", "class DeleteReferencedObjectCommand( ReferencedObjectCommand[ReferencedT], sd.DeleteObject[ReferencedT], ): def _delete_innards( self, schema: s_schema.Schema,", "super()._get_ast( schema, context, parent_node=parent_node) if context.declarative: scls = self.get_object(schema, context)", ") -> s_schema.Schema: referrer_ctx = self.get_referrer_context(context) implicit_bases = None if", "inheritable=False, ephemeral=True, ) def get_implicit_bases( self: ReferencedInheritingObjectT, schema: s_schema.Schema, )", "and context.enable_recursion): # Propagate the creation of a new ref", "ReferencedInheritingObject( so.DerivableInheritingObject, ReferencedObject, ): # Indicates that the object has", "cmd: sd.Command if child_ref.get_is_local(schema) or implicit_bases: # Child is either", "name=refname, ), ) rename_cmd = rename_cmdcls._rename_cmd_from_ast( schema, astnode, context) alter_cmd.add(rename_cmd)", "else: base_name = base_ref.get_name(schema) quals = cls._classname_quals_from_ast( schema, astnode, base_name,", "cls.referenced_astnode astnode = astnode_cls(name=nref) assert isinstance(astnode, qlast.ObjectDDL) return astnode @classmethod", "refname, default=None) if existing is not None: cmdcls: Type[sd.Command] =", "descendant.get_name(schema) assert isinstance(descendant, ReferencedObject) d_referrer = descendant.get_referrer(schema) assert d_referrer is", "True _referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None def __new__(mcls, name:", "else: astnode = super()._get_ast( schema, context, parent_node=parent_node) if context.declarative: scls", "= op.apply(schema, context) return schema def _propagate_ref_rename(self, schema: s_schema.Schema, context:", "schema, context, parent_node=parent_node, ) assert astnode is not None inherited_from", "preserve_path_id: context.current().preserve_path_id = True parent_cmd.add(cmd) schema = delta.apply(schema, context) derived:", "= self.get_referrer_context(context) if referrer_ctx is None: return schema else: referrer", "= getattr(self, 'referenced_astnode', None) if subject_ctx is not None and", "referrer_cls.get_refdict_for_class(mcls) schema = referrer.add_classref(schema, refdict.attr, self.scls) return schema class DeleteReferencedObjectCommand(", "context.current().enable_recursion = rec return schema class CreateReferencedInheritingObject( CreateReferencedObject[ReferencedInheritingObjectT], inheriting.CreateInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],", "context) else: referrer = referrer_ctx.scls schema = self._create_ref(schema, context, referrer)", "return alter._get_ast_node(schema, context) else: return super()._get_ast_node(schema, context) @classmethod def as_inherited_ref_cmd(cls,", "bases.objects(schema) if b not in implicit_bases ], ) else: bases", "self, schema: s_schema.Schema, context: sd.CommandContext ) -> None: scls =", "quals = cls._classname_quals_from_name(name) pnn = sn.get_specialized_name(base_name, referrer_name, *quals) return sn.Name(name=pnn,", "self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, referrer_field: str, fq_name:", "= sn.Name(name) else: base_name = base_ref.get_name(schema) quals = cls._classname_quals_from_ast( schema,", "CreateReferencedObject( ReferencedObjectCommand[ReferencedT], sd.CreateObject[ReferencedT], ): referenced_astnode: ClassVar[Type[qlast.ObjectDDL]] @classmethod def 
_cmd_tree_from_ast( cls,", "f'{vn} is inherited from ' f'{bases_str}, which {verb} not being", "sd.CommandContext, ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema, astnode, context) refctx", "Dict[str, Any], *, referrer_context_class: Optional[ Type[sd.ObjectCommandContext[so.Object]] ] = None, **kwargs:", "d_referrer = descendant.get_referrer(schema) assert d_referrer is not None d_alter_cmd =", "We cannot check for ref existence in this child at", "schema def get_implicit_bases( self, schema: s_schema.Schema, context: sd.CommandContext, bases: Any,", "applicable law or agreed to in writing, software # distributed", "= cls.get_referrer_context(context) if ctx is None: raise RuntimeError(f'no referrer context", "so.RefDict, child: so.InheritingObject, child_ref: ReferencedInheritingObjectT, ) -> Tuple[s_schema.Schema, sd.Command]: get_cmd", "= referrer_ctx.scls schema = self._delete_ref(schema, context, referrer) return schema def", "self.scls if not context.canonical and not scls.generic(schema): implicit_bases = scls.get_implicit_bases(schema)", "ReferencedInheritingObjectT, schema: s_schema.Schema, ) -> List[ReferencedInheritingObjectT]: return [ b for", "is not None: derived_attrs.update(attrs) derived_attrs['name'] = derived_name derived_attrs['bases'] = so.ObjectList.create(", "context: sd.CommandContext, bases: Any, ) -> Sequence[str]: mcls = self.get_schema_metaclass()", "cls.get_referrer_context_class() ctx = context.get(ctxcls) # type: ignore return cast(Optional[sd.ObjectCommandContext[so.Object]], ctx)", "b in self.get_implicit_bases( schema, context, self.get_attribute_value('bases'), ) ] astnode.system_comment =", "cmd class CreateReferencedObject( ReferencedObjectCommand[ReferencedT], sd.CreateObject[ReferencedT], ): referenced_astnode: ClassVar[Type[qlast.ObjectDDL]] @classmethod def", "rebase. 
removed_bases, added_bases = self.get_ref_implicit_base_delta( schema, context, child_ref, implicit_bases) rebase_cmd_cls", "s_schema.Schema, context: sd.CommandContext ) -> Type[qlast.DDLOperation]: subject_ctx = self.get_referrer_context(context) ref_astnode:", "if (not context.in_deletion(offset=1) and not context.disable_dep_verification): implicit_bases = set(self._get_implicit_ref_bases( schema,", "sd.CommandContext, refcls: ReferencedInheritingObjectT, implicit_bases: List[ReferencedInheritingObjectT], ) -> inheriting.BaseDelta_T: child_bases =", "and self.implicit: mcls = self.get_schema_metaclass() refctx = self.get_referrer_context_or_die(context) referrer =", "mcls = self.get_schema_metaclass() default_base = mcls.get_default_base_name() if isinstance(bases, so.ObjectCollectionShell): base_names", "not None: alter = alter_cmd(classname=child.get_name(schema)) with alter.new_context(schema, context, child): schema,", "ReferencedInheritingObjectT = TypeVar('ReferencedInheritingObjectT', bound='ReferencedInheritingObject') class ReferencedObject(so.DerivableObject): #: True if the", "[b.get_name(schema) for b in old_bases.objects(schema)], [b.get_name(schema) for b in new_bases.objects(schema)],", "bases = self.get_attribute_value('bases') if bases: bases = so.ObjectList.create( schema, implicit_bases", "= subject.get_verbosename(schema, with_parent=True) return f'{vn} of {pn}' return vn class", "str: m = hashlib.sha1() for expr in exprs: m.update(expr.encode()) return", "= referrer_class.get_refdict_for_class(objcls) implicit_bases = self._get_implicit_ref_bases( schema, context, referrer, refdict.attr, self.classname)", "None: return schema else: referrer = referrer_ctx.scls schema = self._delete_ref(schema,", "= child.get_field_value(schema, refdict.attr) fq_refname_in_child = self._classname_from_name( self_name, child.get_name(schema), ) child_refname", "ReferencedObjectCommandMeta: cls = super().__new__(mcls, name, bases, clsdct, **kwargs) assert isinstance(cls,", "type(referrer): object_stack.append(referrer) while obj is not None: if isinstance(obj, ReferencedObject):", "type(child).get_field(refdict.attr).type refname = ref_field_type.get_key_for_name( schema, parent_fq_refname) astnode = ref_create_cmd.as_inherited_ref_ast( schema,", "_get_implicit_ref_bases( self, schema: s_schema.Schema, context: sd.CommandContext, referrer: so.InheritingObject, referrer_field: str,", "# from another parent, so we need to do a", "= descendant.get_referrer(schema) assert d_referrer is not None d_alter_cmd = alter_cmdcls(classname=d_name)", "ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]): @classmethod def _classname_from_ast(cls, schema: s_schema.Schema, astnode: qlast.NamedDDL, context: sd.CommandContext", "delta.apply(schema, context) derived: ReferencedT = schema.get(derived_name) return schema, derived def", "= so.ObjectList.create( schema, [self]) mcls = type(self) referrer_class = type(referrer)", "Any ) -> ReferencedObjectCommandMeta: cls = super().__new__(mcls, name, bases, clsdct,", "cmd.classname) return cmd @classmethod def as_inherited_ref_ast(cls, schema: s_schema.Schema, context: sd.CommandContext,", "astnode: qlast.DDLOperation, context: sd.CommandContext, ) -> AlterReferencedInheritingObject[ReferencedInheritingObjectT]: cmd = super()._cmd_tree_from_ast(schema,", "derived def get_verbosename( self, schema: s_schema.Schema, *, with_parent: bool =", "set() for ctx in context.stack: if isinstance(ctx.op, type(self)): deleted_bases.add(ctx.op.scls) 
implicit_bases", "* import hashlib from edb import errors from edb.common import", "referrer_name, *quals) name = sn.Name(name=pnn, module=referrer_name.module) assert isinstance(name, sn.Name) return", "# You may obtain a copy of the License at", "-> s_schema.Schema: referrer_cls = type(referrer) mcls = type(self.scls) refdict =", "child at this # time, because it might get created", "assert d_referrer is not None d_alter_cmd = alter_cmdcls(classname=d_name) r_alter_cmd =", "if any. E.g. for a `create/alter/etc concrete link` command this", "classname=derived_name, added_bases=added_bases, removed_bases=removed_bases, ) cmd.add(rebase_cmd) context = sd.CommandContext( modaliases={}, schema=schema,", "of # our referrer. schema = self._propagate_ref_creation(schema, context, referrer) return", "(parent_item is not None and not parent_item.get_is_final(schema)): implicit_bases.append(parent_item) return implicit_bases", "None, **kwargs: Any ) -> ReferencedObjectCommandMeta: cls = super().__new__(mcls, name,", "astnode, [self.scls]) ref_create.if_not_exists = True ref_create.set_attribute_value(refdict.backref_attr, child) if child.get_is_derived(schema): #", "class CreateReferencedInheritingObject( CreateReferencedObject[ReferencedInheritingObjectT], inheriting.CreateInheritingObject[ReferencedInheritingObjectT], ReferencedInheritingObjectCommand[ReferencedInheritingObjectT], ): def _get_ast( self, schema:", "schemaclass=referrer_class, ), ) cmd.set_attribute_value('is_local', True) if getattr(astnode, 'is_abstract', None): cmd.set_attribute_value('is_abstract',", "reftype.get_key_for_name( schema, fq_refname_in_child) existing = child_coll.get(schema, child_refname, None) if existing", "alter_cmd_cls = sd.ObjectCommandMeta.get_command_class_or_die( sd.AlterObject, type(obj)) alter_cmd = alter_cmd_cls(classname=obj.get_name(schema)) cmd.add(alter_cmd) cmd", ") def _propagate_ref_op(self, schema: s_schema.Schema, context: sd.CommandContext, scls: ReferencedInheritingObject, cb:", "for b in new_bases], ) def _validate( self, schema: s_schema.Schema,", "] else: assert isinstance(bases, so.ObjectList) base_names = list(bases.names(schema)) # Filter", "@classmethod def as_inherited_ref_ast(cls, schema: s_schema.Schema, context: sd.CommandContext, name: str, parent:", "for ctx in context.stack: if isinstance(ctx.op, type(self)): deleted_bases.add(ctx.op.scls) implicit_bases -=", "bases = so.ObjectList.create( schema, implicit_bases, ) self.set_attribute_value('bases', bases) schema =", "descendant): cb(d_alter_cmd, refname) r_alter_cmd.add(d_alter_cmd) schema = r_alter_cmd.apply(schema, context) self.add(r_alter_cmd) context.current().enable_recursion", "cls._classname_quals_from_ast( schema, astnode, base_name, referrer_name, context) pnn = sn.get_specialized_name(base_name, referrer_name,", "object, if any. E.g. 
for a `create/alter/etc concrete link` command", "super()._cmd_tree_from_ast(schema, astnode, context) if isinstance(astnode, cls.referenced_astnode): objcls = cls.get_schema_metaclass() referrer_ctx", "self.get_bases(schema).objects(schema) if not b.generic(schema) ] class ReferencedObjectCommandMeta(sd.ObjectCommandMeta): _transparent_adapter_subclass: ClassVar[bool] =", "alter_cmdcls(classname=d_name) r_alter_cmd = r_alter_cmdcls( classname=d_referrer.get_name(schema)) with r_alter_cmd.new_context(schema, context, d_referrer): with", "ReferencedInheritingObject) implicit_bases = scls.get_implicit_bases(schema) if implicit_bases and not context.declarative: mcls", "referrer_ctx is None: return super()._create_innards(schema, context) else: referrer = referrer_ctx.scls", "is not None d_alter_cmd = alter_cmdcls(classname=d_name) r_alter_cmd = r_alter_cmdcls( classname=d_referrer.get_name(schema))", "reftype.get_key_for_name(schema, derived_name) refcoll = referrer.get_field_value(schema, refdict.attr) existing = refcoll.get(schema, refname,", "renamed in the same op, and this is an error.", "assert isinstance(scls, ReferencedObject) referrer = scls.get_referrer(schema) obj = referrer object_stack", "= cls.get_inherited_ref_name(schema, context, parent, name) astnode_cls = cls.referenced_astnode astnode =", "-> Tuple[sd.DeltaRoot, sd.Command]: delta = sd.DeltaRoot() if referrer is None:", "removed_bases=(), )) alter.add(ref_alter) alter.add(ref_create) self.add(alter) return schema def get_implicit_bases( self,", "bool, default=False, compcoef=None, introspectable=False, inheritable=False, ephemeral=True, ) def get_implicit_bases( self:", "attrs is not None: derived_attrs.update(attrs) derived_attrs['name'] = derived_name derived_attrs['bases'] =", "Copyright 2008-present MagicStack Inc. and the EdgeDB authors. # #", "another parent, so we need to do a rebase. removed_bases,", "in parents ) raise errors.SchemaError( f'cannot drop inherited {vn}', context=self.source_context,", "= utils.ast_to_object( astnode.name, modaliases=context.modaliases, schema=schema, ) except errors.InvalidReferenceError: base_name =", "get_referrer_context_class( cls, ) -> Type[sd.ObjectCommandContext[so.Object]]: if cls._referrer_context_class is None: raise", "Alter(classname=self.classname) return alter._get_ast_node(schema, context) else: return super()._get_ast_node(schema, context) @classmethod def", "schema, cmd = self._propagate_ref_deletion( schema, context, refdict, child, existing) alter.add(cmd)", "errors from edb.common import struct from edb.edgeql import ast as", "not None: return ref_astnode else: if isinstance(self.astnode, (list, tuple)): return", "None astnode.declared_overloaded = True return astnode else: return super()._get_ast(schema, context,", "governing permissions and # limitations under the License. 
#
# This source file is part of the EdgeDB open source project.
#
# Copyright 2008-present MagicStack Inc. and the EdgeDB authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from __future__ import annotations
from typing import *

from edb import errors
from edb.edgeql import ast as qlast

from . import delta as sd
from . import inheriting
from . import objects as so
from . import schema as s_schema
from . import name as sn


ReferencedT = TypeVar('ReferencedT', bound='ReferencedObject')
ReferencedInheritingObjectT = TypeVar(
    'ReferencedInheritingObjectT', bound='ReferencedInheritingObject')


class ReferencedObject(so.DerivableObject):

    #: True if the object has an explicit definition and is not
    #: purely inherited.
    is_local = so.SchemaField(
        bool,
        default=False,
    )

    def get_subject(self, schema: s_schema.Schema) -> Optional[so.Object]:
        # Subclasses may implement this
        # method dynamically, with `subject = SchemaField`
        raise NotImplementedError

    def get_referrer(self, schema: s_schema.Schema) -> Optional[so.Object]:
        return self.get_subject(schema)

    def get_verbosename(
        self,
        schema: s_schema.Schema,
        *,
        with_parent: bool = False,
    ) -> str:
        ...

    def derive_ref(
        self: ReferencedT,
        schema: s_schema.Schema,
        referrer: so.QualifiedObject,
        *qualifiers: str,
        mark_derived: bool = False,
        transient: bool = False,
        name: Optional[str] = None,
        **kwargs: Any,
    ) -> Tuple[s_schema.Schema, ReferencedT]:
        ...


class ReferencedInheritingObject(
    so.DerivableInheritingObject,
    ReferencedObject,
):

    declared_overloaded = so.SchemaField(
        bool,
        default=False,
        compcoef=None,
        introspectable=False,
        inheritable=False,
        ephemeral=True,
    )

    def get_implicit_bases(
        self: ReferencedInheritingObjectT,
        schema: s_schema.Schema,
    ) -> List[ReferencedInheritingObjectT]:
        return [
            b for b in self.get_bases(schema).objects(schema)
            if not b.generic(schema)
        ]


class ReferencedObjectCommandMeta(sd.ObjectCommandMeta):

    def __new__(
        mcls,
        name: str,
        bases: Tuple[type, ...],
        clsdct: Dict[str, Any],
        *,
        referrer_context_class: Optional[type] = None,
        **kwargs: Any,
    ) -> ReferencedObjectCommandMeta:
        cls = super().__new__(mcls, name, bases, clsdct, **kwargs)
        if referrer_context_class is not None:
            cls._referrer_context_class = referrer_context_class
        return cls


class ReferencedObjectCommandBase(
    sd.QualifiedObjectCommand[ReferencedT],
    metaclass=ReferencedObjectCommandMeta,
):

    @classmethod
    def get_referrer_context(
        cls,
        context: sd.CommandContext,
    ) -> Optional[sd.ObjectCommandContext[so.Object]]:
        """Get the context of the command for the referring object, if any."""
        ...


class StronglyReferencedObjectCommand(
    ReferencedObjectCommandBase[ReferencedT]
):
    pass


class ReferencedObjectCommand(ReferencedObjectCommandBase[ReferencedT]):

    @classmethod
    def _classname_from_name(
        cls,
        name: sn.SchemaName,
        referrer_name: sn.SchemaName,
    ) -> sn.Name:
        ...

    @classmethod
    def _classname_quals_from_name(
        cls,
        name: sn.SchemaName,
    ) -> Tuple[str, ...]:
        return ()

    def _create_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is not None:
            referrer = referrer_ctx.scls
            schema = self._create_ref(schema, context, referrer)
        return super()._create_innards(schema, context)

    def _delete_innards(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
    ) -> s_schema.Schema:
        schema = super()._delete_innards(schema, context)
        referrer_ctx = self.get_referrer_context(context)
        if referrer_ctx is not None:
            referrer = referrer_ctx.scls
            schema = self._delete_ref(schema, context, referrer)
        return schema

    def _delete_ref(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.Object,
    ) -> s_schema.Schema:
        scls = self.scls
        referrer_class = type(referrer)
        mcls = type(scls)
        refdict = referrer_class.get_refdict_for_class(mcls)
        reftype = referrer_class.get_field(refdict.attr).type
        refname = reftype.get_key_for(schema, self.scls)
        return referrer.del_classref(schema, refdict.attr, refname)


class CreateReferencedObject(
    ReferencedObjectCommand[ReferencedT],
    sd.CreateObject[ReferencedT],
):

    referenced_astnode: ClassVar[Type[qlast.ObjectDDL]]

    ...


class DeleteReferencedObjectCommand(
    ReferencedObjectCommand[ReferencedT],
    sd.DeleteObject[ReferencedT],
):

    ...


class ReferencedInheritingObjectCommand(
    ReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.InheritingObjectCommand[ReferencedInheritingObjectT],
):

    def _get_implicit_ref_bases(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        referrer: so.InheritingObject,
        referrer_field: str,
        fq_name: sn.SchemaName,
    ) -> List[ReferencedInheritingObjectT]:
        ...

    def _propagate_ref_op(
        self,
        schema: s_schema.Schema,
        context: sd.CommandContext,
        scls: ReferencedInheritingObject,
        cb: Callable[[sd.Command, str], None],
    ) -> s_schema.Schema:
        ...


class CreateReferencedInheritingObject(
    CreateReferencedObject[ReferencedInheritingObjectT],
    inheriting.CreateInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    ...


class AlterReferencedInheritingObject(
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
    inheriting.AlterInheritingObject[ReferencedInheritingObjectT],
):

    ...


class DeleteReferencedInheritingObject(
    DeleteReferencedObjectCommand[ReferencedInheritingObjectT],
    inheriting.DeleteInheritingObject[ReferencedInheritingObjectT],
    ReferencedInheritingObjectCommand[ReferencedInheritingObjectT],
):

    ...
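For orientation, a hypothetical sketch of how a concrete command class would plug into the machinery above; `IndexCommand`, `Index`, and `IndexSourceCommandContext` are assumed names used purely for illustration and are not defined in this module:

# Hypothetical sketch (assumed names): a concrete referenced-object
# command declares its referrer context via the metaclass keyword;
# ReferencedObjectCommandMeta stores it on the class as
# _referrer_context_class, which get_referrer_context() later looks up
# to find the enclosing command for the referring object.
class IndexCommand(
    ReferencedInheritingObjectCommand[Index],
    referrer_context_class=IndexSourceCommandContext,
):
    ...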
from calendar import month_name


class Tools:
    def __init__(self):
        self.output = ""

    def formatDate(self, date):
        elements = date.split("-")
        return f"{elements[2]}. {month_name[int(elements[1])]} {elements[0]}"

    def shortenText(self, string, n):
        #return first n sentences from string
        first = string.find(".")
        for _ in range(n - 1):
            if not string.find(".", first + 1) == -1:
                first = string.find(".", first + 1)
        return f"{string[:first-len(string)]}."

    def tupleUnpack(self, tup):
        self.output = ""
        for item in tup:
            self.output += f"{item} "
        return self.output[:-1]

    def joinList(self, list):
        self.output = ""
        for item in list:
            self.output += f"{item}, "
        return self.output[:-2]  #remove last ', '

    def partialJoin(self, list, n):
        self.output = ""
        i = 0
        for item in list:
            self.output += f"{item}, "
            i += 1
            if i >= n:
                break
        return self.output[:-2]

    def processFilmography(self, list, n):
        self.output = ""
        i = 0
        for item in list:
            if 'year' in item:
                self.output += f"{item['title']} ({item['year']}), "
            else:
                self.output += f"{item['title'].replace(' ()', '')}, "
            i += 1
            if i >= n:
                break
        return self.output[:-2]

    def convertTime(self, runtime):
        time = int(runtime)
        mins = time % 60
        hours = int(time / 60)
        if hours >= 1:
            return f"{hours} h {mins} min"
        else:
            return f"{mins} min"
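A brief usage sketch of the helpers above; the input values are made-up examples, and the expected outputs in the comments follow directly from the code:

# Usage sketch (example inputs are assumptions, not from the source):
tools = Tools()
print(tools.formatDate("1999-03-31"))            # -> "31. March 1999"
print(tools.convertTime("142"))                  # -> "2 h 22 min"
print(tools.joinList(["Drama", "Crime"]))        # -> "Drama, Crime"
print(tools.shortenText("One. Two. Three.", 2))  # -> "One. Two."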
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#__Author__ = 烽火戏诸侯
#_PlugName_ = Shop7z /admin/lipinadd.asp unauthorized access
import re


def assign(service, arg):
    if service == "shop7z":
        return True, arg


def audit(arg):
    payload = 'admin/lipinadd.asp'
    target = arg + payload
    code, head, res, errcode, _ = curl.curl2(target)
    if code == 200 and 'name="lipinname"' in res and 'name="showflag"' in res:
        security_hole(target)


if __name__ == '__main__':
    from dummy import *
    audit(assign('shop7z', 'http://www.99ysbjw.com/')[1])
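The plugin relies on `curl.curl2` and `security_hole` being injected by the host scanner framework (via the `dummy` module in the `__main__` block). A minimal stand-in for local testing might look like the sketch below; the five-tuple shape and the reporting behavior are assumptions inferred from how the plugin unpacks and uses them, not the framework's actual implementation:

# Minimal sketch of the assumed framework hooks, for standalone testing.
import requests

class curl:
    @staticmethod
    def curl2(url):
        # The plugin unpacks (status code, headers, body, error code, extra).
        r = requests.get(url, timeout=10)
        return r.status_code, r.headers, r.text, 0, None

def security_hole(url):
    # Stand-in reporter: the real framework records the finding instead.
    print(f"[vuln] unauthenticated admin page reachable: {url}")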
[ "EFFECT_RANDOM, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity,", "= light.manufacturername == \"eWeLink\" self.is_livarno = light.manufacturername.startswith(\"_TZ3000_\") self.gamut_typ = self.light.colorgamuttype", "\"None\" # Minimum Hue Bridge API version to support groups", ".helpers import remove_devices SCAN_INTERVAL = timedelta(seconds=5) _LOGGER = logging.getLogger(__name__) SUPPORT_HUE_ON_OFF", "Hue lights. Can only be called when a user accidentally", "self._rooms[self.light.id] return DeviceInfo( identifiers={(HUE_DOMAIN, self.device_id)}, manufacturer=self.light.manufacturername, # productname added in", "PlatformNotReady from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.entity import DeviceInfo from", "str(self.gamut)) if self.light.swupdatestate == \"readytoinstall\": err = ( \"Please check", "err @callback def async_update_items( bridge, api, current, async_add_entities, create_item, new_items_callback", "async_timeout from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_FLASH, ATTR_HS_COLOR,", "homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed, ) from homeassistant.util import", "group.lights: rooms[light_id] = group.name # Once we do a rooms", "added bridge.reset_jobs.remove(cancel_update_rooms_listener) cancel_update_rooms_listener() # pylint: disable=not-callable cancel_update_rooms_listener = None @callback", "for item_id in api: if item_id in current: continue current[item_id]", "command[\"alert\"] = \"none\" if ATTR_EFFECT in kwargs: effect = kwargs[ATTR_EFFECT]", "async_setup_platform(hass, config, async_add_entities, discovery_info=None): \"\"\"Old way of setting up Hue", "state object for groups # 1.13 introduced \"any_on\" to group", "called we should not add another listener return cancel_update_rooms_listener =", "light.\"\"\" return self.light.name @property def brightness(self): \"\"\"Return the brightness of", "light.\"\"\" unique_id = self.light.uniqueid if not unique_id and self.is_group and", "\"\"\"Return the device info.\"\"\" if self.light.type in ( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM,", "cancel_update_rooms_listener if cancel_update_rooms_listener is not None: # If there are", "_LOGGER.warning(\"Please update your Hue bridge to support groups\") light_coordinator =", "= 0 for light_id in api_item.lights: if light_id not in", "the listener # until the next time lights are added", "a user accidentally mentions hue platform in their config. 
But", "bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) # First do a", "bridge.api.config.apiversion.split(\".\")) rooms = {} allow_groups = bridge.allow_groups supports_groups = api_version", "False self.is_innr = False self.is_ewelink = False self.is_livarno = False", "360 * 65535) command[\"sat\"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)", "SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.core import callback from", "update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ),", "\"\"\"Return the warmest color_temp that this light supports.\"\"\" if self.is_group:", "command[\"hue\"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command[\"sat\"] = int(kwargs[ATTR_HS_COLOR][1]", "1.4.0 introduced extended group info # 1.12 introduced the state", ") async def async_added_to_hass(self) -> None: \"\"\"Handle entity being added", "we can reach the hub. # Otherwise we will declare", "@property def name(self): \"\"\"Return the name of the Hue light.\"\"\"", "if ATTR_EFFECT in kwargs: effect = kwargs[ATTR_EFFECT] if effect ==", "the ID of this Hue light.\"\"\" return self.unique_id @property def", "def async_setup_entry(hass, config_entry, async_add_entities): \"\"\"Set up the Hue lights from", "self.light.state.get(\"colormode\") @property def hs_color(self): \"\"\"Return the hs color value.\"\"\" mode", "called when a user accidentally mentions hue platform in their", "is available.\"\"\" return self.coordinator.last_update_success and ( self.is_group or self.bridge.allow_unreachable or", "we cancel the listener # until the next time lights", "return cancel_update_rooms_listener = group_coordinator.async_add_listener( _async_update_rooms ) bridge.reset_jobs.append(cancel_update_rooms_listener) _setup_rooms_listener() await group_coordinator.async_refresh()", "and ( self.is_group or self.bridge.allow_unreachable or self.light.state[\"reachable\"] ) @property def", "@property def color_temp(self): \"\"\"Return the CT color value.\"\"\" # Don't", "self.light.action.get(\"ct\") return self.light.state.get(\"ct\") @property def min_mireds(self): \"\"\"Return the coldest color_temp", "{}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms), None, )", "it would have been ignored. 
\"\"\" def create_light(item_class, coordinator, bridge,", "@property def is_on(self): \"\"\"Return true if device is on.\"\"\" if", "hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS]) flash = kwargs.get(ATTR_FLASH) if flash == FLASH_LONG: command[\"alert\"] =", "import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.debounce import Debouncer", "GROUP_TYPE_LUMINAIRE, GROUP_TYPE_ROOM, REQUEST_REFRESH_DELAY, ) from .helpers import remove_devices SCAN_INTERVAL =", "str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None @property def unique_id(self):", "if new_items_callback: new_items_callback() async_add_entities(new_items) def hue_brightness_to_hass(value): \"\"\"Convert hue brightness 1..254", "light_id in api_item.lights: if light_id not in bridge.api.lights: continue light", "aiohue.AiohueException as err: raise UpdateFailed(f\"Hue error: {err}\") from err @callback", "= int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command[\"sat\"] = int(kwargs[ATTR_HS_COLOR][1] /", "self.light.action.get(\"bri\") else: bri = self.light.state.get(\"bri\") if bri is None: return", "self._color_mode source = self.light.action if self.is_group else self.light.state if mode", "bridge self.is_group = is_group self._supported_features = supported_features self._rooms = rooms", "return self.light.state.get(\"colormode\") @property def hs_color(self): \"\"\"Return the hs color value.\"\"\"", "max_mireds(self): \"\"\"Return the warmest color_temp that this light supports.\"\"\" if", "def effect(self): \"\"\"Return the current effect.\"\"\" return self.light.state.get(\"effect\", None) @property", "FLASH_LONG: command[\"alert\"] = \"lselect\" del command[\"on\"] elif flash == FLASH_SHORT:", "in (\"xy\", \"hs\") and \"xy\" in source: return color.color_xy_to_hs(*source[\"xy\"], self.gamut)", "async_safe_fetch(bridge, fetch_method): \"\"\"Safely fetch data.\"\"\" try: with async_timeout.timeout(4): return await", "return [EFFECT_COLORLOOP, EFFECT_RANDOM] @property def device_info(self) -> DeviceInfo | None:", "= kwargs[ATTR_COLOR_TEMP] command[\"ct\"] = max(self.min_mireds, min(temp, self.max_mireds)) if ATTR_BRIGHTNESS in", "current)) if new_items: # This is currently used to setup", "convert to XY first to ensure a consistent # color.", "== \"OSRAM\" self.is_philips = light.manufacturername == \"Philips\" self.is_innr = light.manufacturername", "if self._color_mode != \"ct\": return None if self.is_group: return self.light.action.get(\"ct\")", "async_setup_entry(hass, config_entry, async_add_entities): \"\"\"Set up the Hue lights from a", "in bridge.api.config.apiversion.split(\".\")) rooms = {} allow_groups = bridge.allow_groups supports_groups =", "err = ( \"Please check for software updates of the", "light.manufacturername.startswith(\"_TZ3000_\") self.gamut_typ = self.light.colorgamuttype self.gamut = self.light.colorgamut _LOGGER.debug(\"Color gamut of", "and not self.is_livarno: command[\"alert\"] = \"none\" if self.is_group: await self.bridge.async_request_call(", "None.\" _LOGGER.debug(err, self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None", "the hub. # Otherwise we will declare not ready. 
await", "= None @property def unique_id(self): \"\"\"Return the unique ID of", "UpdateFailed(f\"Hue error: {err}\") from err @callback def async_update_items( bridge, api,", "supported_features or SUPPORT_HUE_EXTENDED else: supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED) return item_class(", "v in bridge.api.config.apiversion.split(\".\")) rooms = {} allow_groups = bridge.allow_groups supports_groups", "reported by 3rd party buls if not min_mireds: return super().min_mireds", "ID of this Hue light.\"\"\" return self.unique_id @property def name(self):", ") bridge.reset_jobs.append(cancel_update_rooms_listener) _setup_rooms_listener() await group_coordinator.async_refresh() update_lights_with_group_support = partial( async_update_items, bridge,", "| SUPPORT_TRANSITION SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE", "format 0..255.\"\"\" return min(255, round((value / 254) * 255)) def", "SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.core import callback", "or self.bridge.allow_unreachable or self.light.state[\"reachable\"] ) @property def supported_features(self): \"\"\"Flag supported", "if self.light.swupdatestate == \"readytoinstall\": err = ( \"Please check for", "\"xy\" in source: return color.color_xy_to_hs(*source[\"xy\"], self.gamut) return None @property def", ") @property def supported_features(self): \"\"\"Flag supported features.\"\"\" return self._supported_features @property", "the device state attributes.\"\"\" if not self.is_group: return {} return", "light\": SUPPORT_HUE_EXTENDED, \"Color light\": SUPPORT_HUE_COLOR, \"Dimmable light\": SUPPORT_HUE_DIMMABLE, \"On/Off plug-in", "SUPPORT_HUE_DIMMABLE, \"On/Off plug-in unit\": SUPPORT_HUE_ON_OFF, \"Color temperature light\": SUPPORT_HUE_COLOR_TEMP, }", "if cancel_update_rooms_listener is not None: # If there are new", "self.light.modelid, name=self.name, # Not yet exposed as properties in aiohue", "extra_state_attributes(self): \"\"\"Return the device state attributes.\"\"\" if not self.is_group: return", "return max(1, round((value / 255) * 254)) class HueLight(CoordinatorEntity, LightEntity):", "else: command[\"effect\"] = \"none\" if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command)", "rooms update, we cancel the listener # until the next", "in api: if item_id in current: continue current[item_id] = create_item(api,", "self.light.action.get(\"colormode\") return self.light.state.get(\"colormode\") @property def hs_color(self): \"\"\"Return the hs color", "self.light.room[\"id\"] return unique_id @property def device_id(self): \"\"\"Return the ID of", "introduced extended group info # 1.12 introduced the state object", "\"\"\"Update items.\"\"\" new_items = [] for item_id in api: if", "SUPPORT_HUE_COLOR, \"Dimmable light\": SUPPORT_HUE_DIMMABLE, \"On/Off plug-in unit\": SUPPORT_HUE_ON_OFF, \"Color temperature", "request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) # First do", "self.is_osram = False self.is_philips = False self.is_innr = False self.is_ewelink", "command[\"effect\"] = \"colorloop\" elif effect == EFFECT_RANDOM: command[\"hue\"] = random.randrange(0,", "return self.light.state.get(\"effect\", None) @property def effect_list(self): \"\"\"Return the list of", "of the %s \" \"bulb in the Philips Hue App.\"", "refresh to see if we can reach the hub. 
#", "Hue light.\"\"\" return self.unique_id @property def name(self): \"\"\"Return the name", "pylint: disable=not-callable cancel_update_rooms_listener = None @callback def _setup_rooms_listener(): nonlocal cancel_update_rooms_listener", "groups # 1.4.0 introduced extended group info # 1.12 introduced", "self.async_write_ha_state ) ) await super().async_added_to_hass() async def async_turn_on(self, **kwargs): \"\"\"Turn", "GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area = None", "Philips Hue App.\" ) _LOGGER.warning(err, self.name) if self.gamut and not", "max_mireds = self.light.controlcapabilities.get(\"ct\", {}).get(\"max\") if not max_mireds: return super().max_mireds return", "group.type != GROUP_TYPE_ROOM: continue for light_id in group.lights: rooms[light_id] =", "supported_features = supported_features or SUPPORT_HUE_EXTENDED else: supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED)", "bridge.api.groups, {}, async_add_entities, partial(create_light, HueLight, group_coordinator, bridge, True, None), None,", "after fetching the data, so manually trigger listener bridge.reset_jobs.append( light_coordinator.async_add_listener(update_lights_with_group_support)", "UpdateFailed, ) from homeassistant.util import color from .const import (", "fetch data.\"\"\" try: with async_timeout.timeout(4): return await bridge.async_request_call(fetch_method) except aiohue.Unauthorized", "self.bridge = bridge self.is_group = is_group self._supported_features = supported_features self._rooms", "if self.is_group: return self.light.action.get(\"ct\") return self.light.state.get(\"ct\") @property def min_mireds(self): \"\"\"Return", "partial import logging import random import aiohue import async_timeout from", "effect.\"\"\" return self.light.state.get(\"effect\", None) @property def effect_list(self): \"\"\"Return the list", "light\": SUPPORT_HUE_DIMMABLE, \"On/Off plug-in unit\": SUPPORT_HUE_ON_OFF, \"Color temperature light\": SUPPORT_HUE_COLOR_TEMP,", "_LOGGER.debug(\"Color gamut of %s: %s\", self.name, str(self.gamut)) if self.light.swupdatestate ==", "color. 
xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command[\"xy\"] = xy_color elif ATTR_COLOR_TEMP", "self.bridge.async_request_call( partial(self.light.set_action, **command) ) else: await self.bridge.async_request_call( partial(self.light.set_state, **command) )", "command[\"on\"] elif not self.is_innr and not self.is_ewelink and not self.is_livarno:", "not unique_id and self.is_group and self.light.room: unique_id = self.light.room[\"id\"] return", "bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms),", "to group state objects GROUP_MIN_API_VERSION = (1, 13, 0) async", "self.gamut = None else: self.is_osram = light.manufacturername == \"OSRAM\" self.is_philips", "cancel the listener # until the next time lights are", "unique_id = self.light.uniqueid if not unique_id and self.is_group and self.light.room:", "via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid), ) async def async_added_to_hass(self) -> None: \"\"\"Handle entity", "\"\"\"Turn the specified or all lights on.\"\"\" command = {\"on\":", "command[\"transitiontime\"] = int(kwargs[ATTR_TRANSITION] * 10) if ATTR_HS_COLOR in kwargs: if", "# Minimum Hue Bridge API version to support groups #", "GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area = None if self.light.id in", "or self.light.state[\"reachable\"] ) @property def supported_features(self): \"\"\"Flag supported features.\"\"\" return", "in kwargs: if self.is_osram: command[\"hue\"] = int(kwargs[ATTR_HS_COLOR][0] / 360 *", "been ignored. \"\"\" def create_light(item_class, coordinator, bridge, is_group, rooms, api,", "color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command[\"xy\"] = xy_color elif ATTR_COLOR_TEMP in kwargs: temp", "true if device is on.\"\"\" if self.is_group: return self.light.state[\"any_on\"] return", "homeassistant.core import callback from homeassistant.exceptions import PlatformNotReady from homeassistant.helpers.debounce import", "if self.is_group: bri = self.light.action.get(\"bri\") else: bri = self.light.state.get(\"bri\") if", "accidentally mentions hue platform in their config. 
But even in", "= self.light.colorgamut _LOGGER.debug(\"Color gamut of %s: %s\", self.name, str(self.gamut)) if", "def effect_list(self): \"\"\"Return the list of supported effects.\"\"\" if self.is_osram:", "SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR", "@callback def async_update_items( bridge, api, current, async_add_entities, create_item, new_items_callback ):", "the specified or all lights off.\"\"\" command = {\"on\": False}", "if not unique_id and self.is_group and self.light.room: unique_id = self.light.room[\"id\"]", "that this light supports.\"\"\" if self.is_group: return super().min_mireds min_mireds =", "def brightness(self): \"\"\"Return the brightness of this light between 0..255.\"\"\"", "Not yet exposed as properties in aiohue suggested_area=suggested_area, sw_version=self.light.raw[\"swversion\"], via_device=(HUE_DOMAIN,", "_LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) if allow_groups: update_groups = partial(", "gamut of %s: %s\", self.name, str(self.gamut)) if self.light.swupdatestate == \"readytoinstall\":", "return self.unique_id @property def name(self): \"\"\"Return the name of the", "data.\"\"\" try: with async_timeout.timeout(4): return await bridge.async_request_call(fetch_method) except aiohue.Unauthorized as", "* 254)) class HueLight(CoordinatorEntity, LightEntity): \"\"\"Representation of a Hue light.\"\"\"", "hass brightness 0..255 to hue 1..254 scale.\"\"\" return max(1, round((value", "of setting up Hue lights. Can only be called when", "light self.bridge = bridge self.is_group = is_group self._supported_features = supported_features", "SUPPORT_COLOR_TEMP SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR SUPPORT_HUE_EXTENDED =", "Don't return color temperature unless in color temperature mode if", "try: with async_timeout.timeout(4): return await bridge.async_request_call(fetch_method) except aiohue.Unauthorized as err:", "def available(self): \"\"\"Return if light is available.\"\"\" return self.coordinator.last_update_success and", "Hue Bridge API 1.24 # (published 03/05/2018) model=self.light.productname or self.light.modelid,", "\"\"\"Return the hue color mode.\"\"\" if self.is_group: return self.light.action.get(\"colormode\") return", "async def async_setup_entry(hass, config_entry, async_add_entities): \"\"\"Set up the Hue lights", "we convert to XY first to ensure a consistent #", "can reach the hub. 
# Otherwise we will declare not", "def color_temp(self): \"\"\"Return the CT color value.\"\"\" # Don't return", "# requests, so we convert to XY first to ensure", "* 255)) def hass_to_hue_brightness(value): \"\"\"Convert hass brightness 0..255 to hue", "light_coordinator.async_add_listener(update_lights_without_group_support) ) return group_coordinator = DataUpdateCoordinator( hass, _LOGGER, name=\"group\", update_method=partial(async_safe_fetch,", "async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False,", "self.bridge.allow_unreachable or self.light.state[\"reachable\"] ) @property def supported_features(self): \"\"\"Flag supported features.\"\"\"", "= partial( async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator,", "return super().max_mireds return max_mireds @property def is_on(self): \"\"\"Return true if", "min_mireds @property def max_mireds(self): \"\"\"Return the warmest color_temp that this", "from homeassistant.util import color from .const import ( DOMAIN as", "temperature mode if self._color_mode != \"ct\": return None if self.is_group:", "bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), ) if allow_groups: update_groups =", "item_id): \"\"\"Create the light.\"\"\" api_item = api[item_id] if is_group: supported_features", "= color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command[\"xy\"] = xy_color elif ATTR_COLOR_TEMP in kwargs:", "UpdateFailed(\"Unauthorized\") from err except aiohue.AiohueException as err: raise UpdateFailed(f\"Hue error:", "cancel_update_rooms_listener rooms.clear() for item_id in bridge.api.groups: group = bridge.api.groups[item_id] if", "= random.randrange(150, 254) else: command[\"effect\"] = \"none\" if self.is_group: await", "self.unique_id @property def name(self): \"\"\"Return the name of the Hue", "the list of supported effects.\"\"\" if self.is_osram: return [EFFECT_RANDOM] return", "color temperature unless in color temperature mode if self._color_mode !=", "_LOGGER = logging.getLogger(__name__) SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION SUPPORT_HUE_DIMMABLE =", "api_item, supported_features, rooms ) async def async_setup_entry(hass, config_entry, async_add_entities): \"\"\"Set", "bri = self.light.state.get(\"bri\") if bri is None: return bri return", "We filter out '0' too, which can be incorrectly reported", "= hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS]) flash = kwargs.get(ATTR_FLASH) if flash == FLASH_LONG: command[\"alert\"]", "light supports.\"\"\" if self.is_group: return super().min_mireds min_mireds = self.light.controlcapabilities.get(\"ct\", {}).get(\"min\")", "(\"xy\", \"hs\") and \"xy\" in source: return color.color_xy_to_hs(*source[\"xy\"], self.gamut) return", "%s: %s, not valid, setting gamut to None.\" _LOGGER.debug(err, self.name,", "hass.data[HUE_DOMAIN][config_entry.entry_id] api_version = tuple(int(v) for v in bridge.api.config.apiversion.split(\".\")) rooms =", "in their config. But even in that case it would", "| SUPPORT_HUE_COLOR SUPPORT_HUE = { \"Extended color light\": SUPPORT_HUE_EXTENDED, \"Color", "async_add_entities, discovery_info=None): \"\"\"Old way of setting up Hue lights. 
Can", "1..254 scale.\"\"\" return max(1, round((value / 255) * 254)) class", "in group.lights: rooms[light_id] = group.name # Once we do a", "self.light.colorgamuttype self.gamut = self.light.colorgamut _LOGGER.debug(\"Color gamut of %s: %s\", self.name,", "unit\": SUPPORT_HUE_ON_OFF, \"Color temperature light\": SUPPORT_HUE_COLOR_TEMP, } ATTR_IS_HUE_GROUP = \"is_hue_group\"", "if new_items: # This is currently used to setup the", "bridge, False, rooms), None, ) # We add a listener", "SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP", "hass, _LOGGER, name=\"light\", update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER,", "api: if item_id in current: continue current[item_id] = create_item(api, item_id)", "self.is_group or self.bridge.allow_unreachable or self.light.state[\"reachable\"] ) @property def supported_features(self): \"\"\"Flag", "ATTR_TRANSITION in kwargs: command[\"transitiontime\"] = int(kwargs[ATTR_TRANSITION] * 10) flash =", "{\"on\": True} if ATTR_TRANSITION in kwargs: command[\"transitiontime\"] = int(kwargs[ATTR_TRANSITION] *", "the light.\"\"\" super().__init__(coordinator) self.light = light self.bridge = bridge self.is_group", "lights. Can only be called when a user accidentally mentions", "if item_id in current: continue current[item_id] = create_item(api, item_id) new_items.append(current[item_id])", "name=\"group\", update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True", "unique ID of this Hue light.\"\"\" unique_id = self.light.uniqueid if", "introduced the state object for groups # 1.13 introduced \"any_on\"", "info.\"\"\" if self.light.type in ( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ):", "light.\"\"\" api_item = api[item_id] if is_group: supported_features = 0 for", "SUPPORT_EFFECT | SUPPORT_COLOR SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR SUPPORT_HUE =", "elif not self.is_innr and not self.is_livarno: command[\"alert\"] = \"none\" if", "items.\"\"\" new_items = [] for item_id in api: if item_id", "import logging import random import aiohue import async_timeout from homeassistant.components.light", "light\": SUPPORT_HUE_COLOR_TEMP, } ATTR_IS_HUE_GROUP = \"is_hue_group\" GAMUT_TYPE_UNAVAILABLE = \"None\" #", "If there are new lights added before _async_update_rooms # is", "* 10) flash = kwargs.get(ATTR_FLASH) if flash == FLASH_LONG: command[\"alert\"]", "if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command) ) else: await self.bridge.async_request_call(", "cancel_update_rooms_listener = None @callback def _async_update_rooms(): \"\"\"Update rooms.\"\"\" nonlocal cancel_update_rooms_listener", "up Hue lights. 
Can only be called when a user", "= [] for item_id in api: if item_id in current:", "| None: \"\"\"Return the device info.\"\"\" if self.light.type in (", "available(self): \"\"\"Return if light is available.\"\"\" return self.coordinator.last_update_success and (", "in ( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area", "self.is_livarno: command[\"alert\"] = \"none\" if ATTR_EFFECT in kwargs: effect =", "= timedelta(seconds=5) _LOGGER = logging.getLogger(__name__) SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION", "light_coordinator, bridge, False, rooms), _setup_rooms_listener, ) # We add a", "of the Hue light.\"\"\" return self.light.name @property def brightness(self): \"\"\"Return", "see if we can reach the hub. # Otherwise we", "cancel_update_rooms_listener() # pylint: disable=not-callable cancel_update_rooms_listener = None @callback def _setup_rooms_listener():", "\"Please check for software updates of the %s \" \"bulb", "models respond differently to hue/sat # requests, so we convert", "self.gamut) return None @property def color_temp(self): \"\"\"Return the CT color", "self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command) ) else: await self.bridge.async_request_call( partial(self.light.set_state,", "def min_mireds(self): \"\"\"Return the coldest color_temp that this light supports.\"\"\"", "def extra_state_attributes(self): \"\"\"Return the device state attributes.\"\"\" if not self.is_group:", "which can be incorrectly reported by 3rd party buls if", "\"OSRAM\" self.is_philips = light.manufacturername == \"Philips\" self.is_innr = light.manufacturername ==", "= SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED) return item_class( coordinator, bridge, is_group, api_item, supported_features,", "if self.is_osram: command[\"hue\"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command[\"sat\"]", "ATTR_TRANSITION, EFFECT_COLORLOOP, EFFECT_RANDOM, FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH,", "async_add_entities, create_item, new_items_callback ): \"\"\"Update items.\"\"\" new_items = [] for", "self.light.name @property def brightness(self): \"\"\"Return the brightness of this light", "== \"innr\" self.is_ewelink = light.manufacturername == \"eWeLink\" self.is_livarno = light.manufacturername.startswith(\"_TZ3000_\")", "None, ) # We add a listener after fetching the", "annotations from datetime import timedelta from functools import partial import", "= False self.is_livarno = False self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut =", "def async_update_items( bridge, api, current, async_add_entities, create_item, new_items_callback ): \"\"\"Update", "= partial( async_update_items, bridge, bridge.api.groups, {}, async_add_entities, partial(create_light, HueLight, group_coordinator,", "self.light.state.get(\"effect\", None) @property def effect_list(self): \"\"\"Return the list of supported", "effect = kwargs[ATTR_EFFECT] if effect == EFFECT_COLORLOOP: command[\"effect\"] = \"colorloop\"", "in bridge.api.groups: group = bridge.api.groups[item_id] if group.type != GROUP_TYPE_ROOM: continue", "reach the hub. 
# Otherwise we will declare not ready.", "True, None), None, ) bridge.reset_jobs.append(group_coordinator.async_add_listener(update_groups)) cancel_update_rooms_listener = None @callback def", "is None: return bri return hue_brightness_to_hass(bri) @property def _color_mode(self): \"\"\"Return", "Bridge API version to support groups # 1.4.0 introduced extended", "platform in their config. But even in that case it", "hue/sat # requests, so we convert to XY first to", "None else: self.is_osram = light.manufacturername == \"OSRAM\" self.is_philips = light.manufacturername", "async def async_turn_on(self, **kwargs): \"\"\"Turn the specified or all lights", "homeassistant.helpers.entity import DeviceInfo from homeassistant.helpers.update_coordinator import ( CoordinatorEntity, DataUpdateCoordinator, UpdateFailed,", "%s\", self.name, str(self.gamut)) if self.light.swupdatestate == \"readytoinstall\": err = (", "SUPPORT_BRIGHTNESS SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE |", "name(self): \"\"\"Return the name of the Hue light.\"\"\" return self.light.name", "in that case it would have been ignored. \"\"\" def", "bridge.reset_jobs.remove(cancel_update_rooms_listener) cancel_update_rooms_listener() # pylint: disable=not-callable cancel_update_rooms_listener = None @callback def", "temperature unless in color temperature mode if self._color_mode != \"ct\":", "light_coordinator.async_add_listener(update_lights_with_group_support) ) update_lights_with_group_support() async def async_safe_fetch(bridge, fetch_method): \"\"\"Safely fetch data.\"\"\"", "_async_update_rooms # is called we should not add another listener", "as properties in aiohue suggested_area=suggested_area, sw_version=self.light.raw[\"swversion\"], via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid), ) async", "= False self.is_innr = False self.is_ewelink = False self.is_livarno =", "\"ct\": return None if self.is_group: return self.light.action.get(\"ct\") return self.light.state.get(\"ct\") @property", "supported_features = 0 for light_id in api_item.lights: if light_id not", "if not supports_groups: update_lights_without_group_support = partial( async_update_items, bridge, bridge.api.lights, {},", "@property def extra_state_attributes(self): \"\"\"Return the device state attributes.\"\"\" if not", "light.manufacturername == \"eWeLink\" self.is_livarno = light.manufacturername.startswith(\"_TZ3000_\") self.gamut_typ = self.light.colorgamuttype self.gamut", "bri is None: return bri return hue_brightness_to_hass(bri) @property def _color_mode(self):", "setting up Hue lights. 
Can only be called when a", "return unique_id @property def device_id(self): \"\"\"Return the ID of this", "= \"Color gamut of %s: %s, not valid, setting gamut", "SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, ) from homeassistant.core", "self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None @property def", "light_coordinator.async_refresh() if not light_coordinator.last_update_success: raise PlatformNotReady if not supports_groups: update_lights_without_group_support", "command[\"sat\"] = random.randrange(150, 254) else: command[\"effect\"] = \"none\" if self.is_group:", "@property def max_mireds(self): \"\"\"Return the warmest color_temp that this light", "new_items: # This is currently used to setup the listener", "xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut) command[\"xy\"] = xy_color elif ATTR_COLOR_TEMP in", "else: self.is_osram = light.manufacturername == \"OSRAM\" self.is_philips = light.manufacturername ==", "from err except aiohue.AiohueException as err: raise UpdateFailed(f\"Hue error: {err}\")", "self._supported_features @property def effect(self): \"\"\"Return the current effect.\"\"\" return self.light.state.get(\"effect\",", "for light_id in group.lights: rooms[light_id] = group.name # Once we", "def name(self): \"\"\"Return the name of the Hue light.\"\"\" return", "self.light.swupdatestate == \"readytoinstall\": err = ( \"Please check for software", "self.is_osram: command[\"hue\"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command[\"sat\"] =", "int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535) command[\"sat\"] = int(kwargs[ATTR_HS_COLOR][1] / 100", "if self.is_livarno: return 500 max_mireds = self.light.controlcapabilities.get(\"ct\", {}).get(\"max\") if not", "bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms), None,", "so manually trigger listener bridge.reset_jobs.append( light_coordinator.async_add_listener(update_lights_with_group_support) ) update_lights_with_group_support() async def", "api, current)) if new_items: # This is currently used to", "): \"\"\"Update items.\"\"\" new_items = [] for item_id in api:", "self.light = light self.bridge = bridge self.is_group = is_group self._supported_features", "ATTR_COLOR_TEMP in kwargs: temp = kwargs[ATTR_COLOR_TEMP] command[\"ct\"] = max(self.min_mireds, min(temp,", "10) if ATTR_HS_COLOR in kwargs: if self.is_osram: command[\"hue\"] = int(kwargs[ATTR_HS_COLOR][0]", "is_group: supported_features = 0 for light_id in api_item.lights: if light_id", "{err}\") from err @callback def async_update_items( bridge, api, current, async_add_entities,", "\" \"bulb in the Philips Hue App.\" ) _LOGGER.warning(err, self.name)", "setting gamut to None.\" _LOGGER.debug(err, self.name, str(self.gamut)) self.gamut_typ = GAMUT_TYPE_UNAVAILABLE", "device info.\"\"\" if self.light.type in ( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE,", "( GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area =", "= None if self.light.id in self._rooms: suggested_area = self._rooms[self.light.id] return", "user accidentally mentions hue platform in their config. 
But even", "bridge.reset_jobs.append( light_coordinator.async_add_listener(update_lights_without_group_support) ) return group_coordinator = DataUpdateCoordinator( hass, _LOGGER, name=\"group\",", "self.gamut) command[\"xy\"] = xy_color elif ATTR_COLOR_TEMP in kwargs: temp =", "elif effect == EFFECT_RANDOM: command[\"hue\"] = random.randrange(0, 65535) command[\"sat\"] =", "!= GROUP_TYPE_ROOM: continue for light_id in group.lights: rooms[light_id] = group.name", "rooms = {} allow_groups = bridge.allow_groups supports_groups = api_version >=", "listener # until the next time lights are added bridge.reset_jobs.remove(cancel_update_rooms_listener)", "datetime import timedelta from functools import partial import logging import", "group_coordinator.async_add_listener( _async_update_rooms ) bridge.reset_jobs.append(cancel_update_rooms_listener) _setup_rooms_listener() await group_coordinator.async_refresh() update_lights_with_group_support = partial(", "effect(self): \"\"\"Return the current effect.\"\"\" return self.light.state.get(\"effect\", None) @property def", "name=self.name, # Not yet exposed as properties in aiohue suggested_area=suggested_area,", "self.bridge.async_request_call( partial(self.light.set_state, **command) ) await self.coordinator.async_request_refresh() async def async_turn_off(self, **kwargs):", "coordinator, bridge, is_group, api_item, supported_features, rooms ) async def async_setup_entry(hass,", "hs_color(self): \"\"\"Return the hs color value.\"\"\" mode = self._color_mode source", "self._supported_features = supported_features self._rooms = rooms if is_group: self.is_osram =", "update rooms if new_items_callback: new_items_callback() async_add_entities(new_items) def hue_brightness_to_hass(value): \"\"\"Convert hue", "} ATTR_IS_HUE_GROUP = \"is_hue_group\" GAMUT_TYPE_UNAVAILABLE = \"None\" # Minimum Hue", "of a Hue light.\"\"\" def __init__(self, coordinator, bridge, is_group, light,", "None @callback def _setup_rooms_listener(): nonlocal cancel_update_rooms_listener if cancel_update_rooms_listener is not", "self.light.room: unique_id = self.light.room[\"id\"] return unique_id @property def device_id(self): \"\"\"Return", "= SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP |", "return hue_brightness_to_hass(bri) @property def _color_mode(self): \"\"\"Return the hue color mode.\"\"\"", "groups # 1.13 introduced \"any_on\" to group state objects GROUP_MIN_API_VERSION", "in source: return color.color_xy_to_hs(*source[\"xy\"], self.gamut) return None @property def color_temp(self):", "self.is_livarno: command[\"alert\"] = \"none\" if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command)", "as HUE_DOMAIN, GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_LIGHT_SOURCE, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_ROOM, REQUEST_REFRESH_DELAY, ) from .helpers", "on.\"\"\" if self.is_group: return self.light.state[\"any_on\"] return self.light.state[\"on\"] @property def available(self):", "GAMUT_TYPE_UNAVAILABLE self.gamut = None else: self.is_osram = light.manufacturername == \"OSRAM\"", "supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED) return item_class( coordinator, bridge, is_group, api_item,", "ATTR_IS_HUE_GROUP = \"is_hue_group\" GAMUT_TYPE_UNAVAILABLE = \"None\" # Minimum Hue Bridge", "65535) command[\"sat\"] = random.randrange(150, 254) else: command[\"effect\"] = \"none\" if", "allow_groups = bridge.allow_groups supports_groups = api_version >= GROUP_MIN_API_VERSION if 
SUPPORT_HUE_ON_OFF = SUPPORT_FLASH | SUPPORT_TRANSITION
SUPPORT_HUE_DIMMABLE = SUPPORT_HUE_ON_OFF | SUPPORT_BRIGHTNESS
SUPPORT_HUE_COLOR_TEMP = SUPPORT_HUE_DIMMABLE | SUPPORT_COLOR_TEMP
SUPPORT_HUE_COLOR = SUPPORT_HUE_DIMMABLE | SUPPORT_EFFECT | SUPPORT_COLOR
SUPPORT_HUE_EXTENDED = SUPPORT_HUE_COLOR_TEMP | SUPPORT_HUE_COLOR

SUPPORT_HUE = {
    "Extended color light": SUPPORT_HUE_EXTENDED,
    "Color light": SUPPORT_HUE_COLOR,
    "Dimmable light": SUPPORT_HUE_DIMMABLE,
    "On/Off plug-in unit": SUPPORT_HUE_ON_OFF,
    "Color temperature light": SUPPORT_HUE_COLOR_TEMP,
}

ATTR_IS_HUE_GROUP = "is_hue_group"
GAMUT_TYPE_UNAVAILABLE = "None"
# Minimum Hue Bridge API version to support groups
# 1.4.0 introduced extended group info
# 1.12 introduced the state object for groups
# 1.13 introduced "any_on" to group state objects
GROUP_MIN_API_VERSION = (1, 13, 0)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Old way of setting up Hue lights.

    Can only be called when a user accidentally mentions hue platform in
    their config. But even in that case it would have been ignored.
    """


def create_light(item_class, coordinator, bridge, is_group, rooms, api, item_id):
    """Create the light."""
    api_item = api[item_id]

    if is_group:
        supported_features = 0
        for light_id in api_item.lights:
            if light_id not in bridge.api.lights:
                continue
            light = bridge.api.lights[light_id]
            supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED)
        supported_features = supported_features or SUPPORT_HUE_EXTENDED
    else:
        supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED)
    return item_class(coordinator, bridge, is_group, api_item, supported_features, rooms)
\"\"\" def create_light(item_class, coordinator, bridge, is_group, rooms, api, item_id):", "state objects GROUP_MIN_API_VERSION = (1, 13, 0) async def async_setup_platform(hass,", "return super().min_mireds min_mireds = self.light.controlcapabilities.get(\"ct\", {}).get(\"min\") # We filter out", "\"none\" if ATTR_EFFECT in kwargs: effect = kwargs[ATTR_EFFECT] if effect", "Assistant.\"\"\" self.async_on_remove( self.bridge.listen_updates( self.light.ITEM_TYPE, self.light.id, self.async_write_ha_state ) ) await super().async_added_to_hass()", "all lights off.\"\"\" command = {\"on\": False} if ATTR_TRANSITION in", "update_groups = partial( async_update_items, bridge, bridge.api.groups, {}, async_add_entities, partial(create_light, HueLight,", "bridge.api.lights: continue light = bridge.api.lights[light_id] supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED) supported_features", "def async_safe_fetch(bridge, fetch_method): \"\"\"Safely fetch data.\"\"\" try: with async_timeout.timeout(4): return", "to XY first to ensure a consistent # color. xy_color", "False, rooms), None, ) # We add a listener after", "{}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge, False, rooms), _setup_rooms_listener, )", "False self.is_livarno = False self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None", "0) async def async_setup_platform(hass, config, async_add_entities, discovery_info=None): \"\"\"Old way of", "super().max_mireds return max_mireds @property def is_on(self): \"\"\"Return true if device", "SUPPORT_HUE = { \"Extended color light\": SUPPORT_HUE_EXTENDED, \"Color light\": SUPPORT_HUE_COLOR,", "self.light.state[\"reachable\"] ) @property def supported_features(self): \"\"\"Flag supported features.\"\"\" return self._supported_features", "FLASH_SHORT: command[\"alert\"] = \"select\" del command[\"on\"] elif not self.is_innr and", "Hue lights.\"\"\" from __future__ import annotations from datetime import timedelta", "255)) def hass_to_hue_brightness(value): \"\"\"Convert hass brightness 0..255 to hue 1..254", "\"\"\"Return the ID of this Hue light.\"\"\" return self.unique_id @property", "new_items_callback() async_add_entities(new_items) def hue_brightness_to_hass(value): \"\"\"Convert hue brightness 1..254 to hass", "[EFFECT_RANDOM] return [EFFECT_COLORLOOP, EFFECT_RANDOM] @property def device_info(self) -> DeviceInfo |", "info # 1.12 introduced the state object for groups #", "FLASH_LONG, FLASH_SHORT, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_FLASH, SUPPORT_TRANSITION, LightEntity, )", "Bridge API 1.24 # (published 03/05/2018) model=self.light.productname or self.light.modelid, name=self.name,", "random.randrange(150, 254) else: command[\"effect\"] = \"none\" if self.is_group: await self.bridge.async_request_call(", "== \"eWeLink\" self.is_livarno = light.manufacturername.startswith(\"_TZ3000_\") self.gamut_typ = self.light.colorgamuttype self.gamut =", "), ) # First do a refresh to see if", "DataUpdateCoordinator, UpdateFailed, ) from homeassistant.util import color from .const import", "homeassistant.util import color from .const import ( DOMAIN as HUE_DOMAIN,", "device_info(self) -> DeviceInfo | None: \"\"\"Return the device info.\"\"\" if", "partial( async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight, light_coordinator, bridge,", "temperature light\": SUPPORT_HUE_COLOR_TEMP, } ATTR_IS_HUE_GROUP = \"is_hue_group\" GAMUT_TYPE_UNAVAILABLE = \"None\"", 
"\"Extended color light\": SUPPORT_HUE_EXTENDED, \"Color light\": SUPPORT_HUE_COLOR, \"Dimmable light\": SUPPORT_HUE_DIMMABLE,", "cancel_update_rooms_listener = None @callback def _setup_rooms_listener(): nonlocal cancel_update_rooms_listener if cancel_update_rooms_listener", "new lights added before _async_update_rooms # is called we should", "Once we do a rooms update, we cancel the listener", "1.12 introduced the state object for groups # 1.13 introduced", "exposed as properties in aiohue suggested_area=suggested_area, sw_version=self.light.raw[\"swversion\"], via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid), )", "will declare not ready. await light_coordinator.async_refresh() if not light_coordinator.last_update_success: raise", "bridge, bridge.api.lights.update), update_interval=SCAN_INTERVAL, request_refresh_debouncer=Debouncer( bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True ), )", "respond differently to hue/sat # requests, so we convert to", "import ( DOMAIN as HUE_DOMAIN, GROUP_TYPE_LIGHT_GROUP, GROUP_TYPE_LIGHT_SOURCE, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_ROOM, REQUEST_REFRESH_DELAY,", "as err: raise UpdateFailed(f\"Hue error: {err}\") from err @callback def", "self.is_group: return self.light.state[\"any_on\"] return self.light.state[\"on\"] @property def available(self): \"\"\"Return if", "to see if we can reach the hub. # Otherwise", "entity being added to Home Assistant.\"\"\" self.async_on_remove( self.bridge.listen_updates( self.light.ITEM_TYPE, self.light.id,", "# Don't return color temperature unless in color temperature mode", "= None @callback def _setup_rooms_listener(): nonlocal cancel_update_rooms_listener if cancel_update_rooms_listener is", "return item_class( coordinator, bridge, is_group, api_item, supported_features, rooms ) async", "incorrectly reported by 3rd party buls if not min_mireds: return", "is on.\"\"\" if self.is_group: return self.light.state[\"any_on\"] return self.light.state[\"on\"] @property def", "hue 1..254 scale.\"\"\" return max(1, round((value / 255) * 254))", "Philips Hue lights.\"\"\" from __future__ import annotations from datetime import", "err except aiohue.AiohueException as err: raise UpdateFailed(f\"Hue error: {err}\") from", "item_id in api: if item_id in current: continue current[item_id] =", "allow_groups and not supports_groups: _LOGGER.warning(\"Please update your Hue bridge to", "brightness 0..255 to hue 1..254 scale.\"\"\" return max(1, round((value /", "create_item, new_items_callback ): \"\"\"Update items.\"\"\" new_items = [] for item_id", "first to ensure a consistent # color. 
async def async_safe_fetch(bridge, fetch_method):
    """Safely fetch data."""
    try:
        with async_timeout.timeout(4):
            return await bridge.async_request_call(fetch_method)
    except aiohue.Unauthorized as err:
        await bridge.handle_unauthorized_error()
        raise UpdateFailed("Unauthorized") from err
    except aiohue.AiohueException as err:
        raise UpdateFailed(f"Hue error: {err}") from err


@callback
def async_update_items(
    bridge, api, current, async_add_entities, create_item, new_items_callback
):
    """Update items."""
    new_items = []

    for item_id in api:
        if item_id in current:
            continue

        current[item_id] = create_item(api, item_id)
        new_items.append(current[item_id])

    bridge.hass.async_create_task(remove_devices(bridge, api, current))

    if new_items:
        # This is currently used to setup the listener to update rooms
        if new_items_callback:
            new_items_callback()
        async_add_entities(new_items)
def hue_brightness_to_hass(value):
    """Convert hue brightness 1..254 to hass format 0..255."""
    return min(255, round((value / 254) * 255))


def hass_to_hue_brightness(value):
    """Convert hass brightness 0..255 to hue 1..254 scale."""
    return max(1, round((value / 255) * 254))


class HueLight(CoordinatorEntity, LightEntity):
    """Representation of a Hue light."""

    def __init__(self, coordinator, bridge, is_group, light, supported_features, rooms):
        """Initialize the light."""
        super().__init__(coordinator)
        self.light = light
        self.bridge = bridge
        self.is_group = is_group
        self._supported_features = supported_features
        self._rooms = rooms

        if is_group:
            self.is_osram = False
            self.is_philips = False
            self.is_innr = False
            self.is_ewelink = False
            self.is_livarno = False
            self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
            self.gamut = None
        else:
            self.is_osram = light.manufacturername == "OSRAM"
            self.is_philips = light.manufacturername == "Philips"
            self.is_innr = light.manufacturername == "innr"
            self.is_ewelink = light.manufacturername == "eWeLink"
            self.is_livarno = light.manufacturername.startswith("_TZ3000_")
            self.gamut_typ = self.light.colorgamuttype
            self.gamut = self.light.colorgamut
            _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
            if self.light.swupdatestate == "readytoinstall":
                err = (
                    "Please check for software updates of the %s "
                    "bulb in the Philips Hue App."
                )
                _LOGGER.warning(err, self.name)
            if self.gamut and not color.check_valid_gamut(self.gamut):
                err = "Color gamut of %s: %s, not valid, setting gamut to None."
                _LOGGER.debug(err, self.name, str(self.gamut))
                self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
                self.gamut = None
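    # The manufacturer quirk flags set in __init__ steer behavior elsewhere in
    # this class: is_osram switches async_turn_on to raw hue/sat commands and
    # trims the effect list, is_innr/is_ewelink/is_livarno suppress the
    # "alert" key the bridge would otherwise receive, and is_livarno also pins
    # max_mireds to 500. This summary is my cross-reference of the code below,
    # not a comment carried over from the original file.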
    @property
    def unique_id(self):
        """Return the unique ID of this Hue light."""
        unique_id = self.light.uniqueid
        if not unique_id and self.is_group and self.light.room:
            unique_id = self.light.room["id"]

        return unique_id

    @property
    def device_id(self):
        """Return the ID of this Hue light."""
        return self.unique_id

    @property
    def name(self):
        """Return the name of the Hue light."""
        return self.light.name

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        if self.is_group:
            bri = self.light.action.get("bri")
        else:
            bri = self.light.state.get("bri")

        if bri is None:
            return bri

        return hue_brightness_to_hass(bri)

    @property
    def _color_mode(self):
        """Return the hue color mode."""
        if self.is_group:
            return self.light.action.get("colormode")
        return self.light.state.get("colormode")

    @property
    def hs_color(self):
        """Return the hs color value."""
        mode = self._color_mode
        source = self.light.action if self.is_group else self.light.state

        if mode in ("xy", "hs") and "xy" in source:
            return color.color_xy_to_hs(*source["xy"], self.gamut)

        return None

    @property
    def color_temp(self):
        """Return the CT color value."""
        # Don't return color temperature unless in color temperature mode
        if self._color_mode != "ct":
            return None

        if self.is_group:
            return self.light.action.get("ct")
        return self.light.state.get("ct")
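    # The mired bounds below prefer the bulb's own "ct" control capabilities
    # and fall back to LightEntity's class defaults when the value is missing
    # or falsy; filtering out 0 matters because some third-party bulbs report
    # it even though 0 mireds is not a usable color temperature. The fallback
    # values themselves (153/500 at the time of writing, if memory serves)
    # live in the base class, not here.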
    @property
    def min_mireds(self):
        """Return the coldest color_temp that this light supports."""
        if self.is_group:
            return super().min_mireds

        min_mireds = self.light.controlcapabilities.get("ct", {}).get("min")

        # We filter out '0' too, which can be incorrectly reported by 3rd party bulbs
        if not min_mireds:
            return super().min_mireds

        return min_mireds

    @property
    def max_mireds(self):
        """Return the warmest color_temp that this light supports."""
        if self.is_group:
            return super().max_mireds
        if self.is_livarno:
            return 500

        max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")

        if not max_mireds:
            return super().max_mireds

        return max_mireds

    @property
    def is_on(self):
        """Return true if device is on."""
        if self.is_group:
            return self.light.state["any_on"]
        return self.light.state["on"]

    @property
    def available(self):
        """Return if light is available."""
        return self.coordinator.last_update_success and (
            self.is_group
            or self.bridge.allow_unreachable
            or self.light.state["reachable"]
        )

    @property
    def supported_features(self):
        """Flag supported features."""
        return self._supported_features

    @property
    def effect(self):
        """Return the current effect."""
        return self.light.state.get("effect", None)

    @property
    def effect_list(self):
        """Return the list of supported effects."""
        if self.is_osram:
            return [EFFECT_RANDOM]
        return [EFFECT_COLORLOOP, EFFECT_RANDOM]
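    # device_info below returns None for group entities, so groups never get
    # a device registry entry; individual bulbs are registered with the room
    # name (from the rooms mapping built in async_setup_entry) as the
    # suggested area. The registry consequence of returning None is my note,
    # not original commentary.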
    @property
    def device_info(self) -> DeviceInfo | None:
        """Return the device info."""
        if self.light.type in (
            GROUP_TYPE_LIGHT_GROUP,
            GROUP_TYPE_ROOM,
            GROUP_TYPE_LUMINAIRE,
            GROUP_TYPE_LIGHT_SOURCE,
        ):
            return None

        suggested_area = None
        if self.light.id in self._rooms:
            suggested_area = self._rooms[self.light.id]

        return DeviceInfo(
            identifiers={(HUE_DOMAIN, self.device_id)},
            manufacturer=self.light.manufacturername,
            # productname added in Hue Bridge API 1.24
            # (published 03/05/2018)
            model=self.light.productname or self.light.modelid,
            name=self.name,
            # Not yet exposed as properties in aiohue
            suggested_area=suggested_area,
            sw_version=self.light.raw["swversion"],
            via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid),
        )

    async def async_added_to_hass(self) -> None:
        """Handle entity being added to Home Assistant."""
        self.async_on_remove(
            self.bridge.listen_updates(
                self.light.ITEM_TYPE, self.light.id, self.async_write_ha_state
            )
        )
        await super().async_added_to_hass()
    async def async_turn_on(self, **kwargs):
        """Turn the specified or all lights on."""
        command = {"on": True}

        if ATTR_TRANSITION in kwargs:
            command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)

        if ATTR_HS_COLOR in kwargs:
            if self.is_osram:
                command["hue"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
                command["sat"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
            else:
                # Philips hue bulb models respond differently to hue/sat
                # requests, so we convert to XY first to ensure a consistent
                # color.
                xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut)
                command["xy"] = xy_color
        elif ATTR_COLOR_TEMP in kwargs:
            temp = kwargs[ATTR_COLOR_TEMP]
            command["ct"] = max(self.min_mireds, min(temp, self.max_mireds))

        if ATTR_BRIGHTNESS in kwargs:
            command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS])

        flash = kwargs.get(ATTR_FLASH)

        if flash == FLASH_LONG:
            command["alert"] = "lselect"
            del command["on"]
        elif flash == FLASH_SHORT:
            command["alert"] = "select"
            del command["on"]
        elif not self.is_innr and not self.is_ewelink and not self.is_livarno:
            command["alert"] = "none"

        if ATTR_EFFECT in kwargs:
            effect = kwargs[ATTR_EFFECT]
            if effect == EFFECT_COLORLOOP:
                command["effect"] = "colorloop"
            elif effect == EFFECT_RANDOM:
                command["hue"] = random.randrange(0, 65535)
                command["sat"] = random.randrange(150, 254)
            else:
                command["effect"] = "none"

        if self.is_group:
            await self.bridge.async_request_call(
                partial(self.light.set_action, **command)
            )
        else:
            await self.bridge.async_request_call(
                partial(self.light.set_state, **command)
            )

        await self.coordinator.async_request_refresh()
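    # async_turn_off mirrors the flash handling above: when a flash alert is
    # sent, the handlers delete the "on" key so the command only triggers the
    # alert and leaves the power state untouched. The rationale for the
    # deletion is my reading of the code, not an original comment.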
Can only be", "GROUP_TYPE_ROOM, GROUP_TYPE_LUMINAIRE, GROUP_TYPE_LIGHT_SOURCE, ): return None suggested_area = None if", "Can only be called when a user accidentally mentions hue", "def device_info(self) -> DeviceInfo | None: \"\"\"Return the device info.\"\"\"", "lights off.\"\"\" command = {\"on\": False} if ATTR_TRANSITION in kwargs:", "def async_turn_on(self, **kwargs): \"\"\"Turn the specified or all lights on.\"\"\"", "= light.manufacturername.startswith(\"_TZ3000_\") self.gamut_typ = self.light.colorgamuttype self.gamut = self.light.colorgamut _LOGGER.debug(\"Color gamut", "flash == FLASH_SHORT: command[\"alert\"] = \"select\" del command[\"on\"] elif not", "\"lselect\" del command[\"on\"] elif flash == FLASH_SHORT: command[\"alert\"] = \"select\"", "groups\") light_coordinator = DataUpdateCoordinator( hass, _LOGGER, name=\"light\", update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update),", "def max_mireds(self): \"\"\"Return the warmest color_temp that this light supports.\"\"\"", "to setup the listener to update rooms if new_items_callback: new_items_callback()", "kwargs: command[\"transitiontime\"] = int(kwargs[ATTR_TRANSITION] * 10) flash = kwargs.get(ATTR_FLASH) if", "self.is_group: return self.light.action.get(\"colormode\") return self.light.state.get(\"colormode\") @property def hs_color(self): \"\"\"Return the", "= False self.is_ewelink = False self.is_livarno = False self.gamut_typ =", "group state objects GROUP_MIN_API_VERSION = (1, 13, 0) async def", "self.device_id)}, manufacturer=self.light.manufacturername, # productname added in Hue Bridge API 1.24", "update_lights_with_group_support = partial( async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light, HueLight,", "update_lights_with_group_support() async def async_safe_fetch(bridge, fetch_method): \"\"\"Safely fetch data.\"\"\" try: with", "group_coordinator.async_refresh() update_lights_with_group_support = partial( async_update_items, bridge, bridge.api.lights, {}, async_add_entities, partial(create_light,", "async def async_added_to_hass(self) -> None: \"\"\"Handle entity being added to", "so we convert to XY first to ensure a consistent", "= \"none\" if self.is_group: await self.bridge.async_request_call( partial(self.light.set_action, **command) ) else:", "bridge.api.groups[item_id] if group.type != GROUP_TYPE_ROOM: continue for light_id in group.lights:", "continue light = bridge.api.lights[light_id] supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED) supported_features =", "API 1.24 # (published 03/05/2018) model=self.light.productname or self.light.modelid, name=self.name, #", "do a rooms update, we cancel the listener # until", "lights.\"\"\" from __future__ import annotations from datetime import timedelta from", "self.light.action if self.is_group else self.light.state if mode in (\"xy\", \"hs\")", "lights on.\"\"\" command = {\"on\": True} if ATTR_TRANSITION in kwargs:", "self.gamut_typ = GAMUT_TYPE_UNAVAILABLE self.gamut = None else: self.is_osram = light.manufacturername", ") from homeassistant.util import color from .const import ( DOMAIN", "def hs_color(self): \"\"\"Return the hs color value.\"\"\" mode = self._color_mode", "value.\"\"\" mode = self._color_mode source = self.light.action if self.is_group else", "del command[\"on\"] elif not self.is_innr and not self.is_ewelink and not", "== FLASH_SHORT: command[\"alert\"] = \"select\" del command[\"on\"] elif not self.is_innr", "not self.is_livarno: command[\"alert\"] = \"none\" 
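# A minimal standalone sketch (not part of the module): the bridge reports
# its API version as a dotted string, and group support is decided by
# comparing it, as an int tuple, against GROUP_MIN_API_VERSION. The helper
# name below is hypothetical, added only for illustration.
def _example_supports_groups(apiversion: str) -> bool:
    """Mirror the version check performed in async_setup_entry below."""
    return tuple(int(v) for v in apiversion.split(".")) >= GROUP_MIN_API_VERSION


# _example_supports_groups("1.46.0") -> True, _example_supports_groups("1.12.0") -> False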
def create_light(item_class, coordinator, bridge, is_group, rooms, api, item_id):
    """Create the light."""
    api_item = api[item_id]

    if is_group:
        supported_features = 0
        for light_id in api_item.lights:
            if light_id not in bridge.api.lights:
                continue
            light = bridge.api.lights[light_id]
            supported_features |= SUPPORT_HUE.get(light.type, SUPPORT_HUE_EXTENDED)
        supported_features = supported_features or SUPPORT_HUE_EXTENDED
    else:
        supported_features = SUPPORT_HUE.get(api_item.type, SUPPORT_HUE_EXTENDED)
    return item_class(
        coordinator, bridge, is_group, api_item, supported_features, rooms
    )


async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the Hue lights from a config entry."""
    bridge = hass.data[HUE_DOMAIN][config_entry.entry_id]
    api_version = tuple(int(v) for v in bridge.api.config.apiversion.split("."))
    rooms = {}

    allow_groups = bridge.allow_groups
    supports_groups = api_version >= GROUP_MIN_API_VERSION
    if allow_groups and not supports_groups:
        _LOGGER.warning("Please update your Hue bridge to support groups")

    light_coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="light",
        update_method=partial(async_safe_fetch, bridge, bridge.api.lights.update),
        update_interval=SCAN_INTERVAL,
        request_refresh_debouncer=Debouncer(
            bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
        ),
    )

    # First do a refresh to see if we can reach the hub.
    # Otherwise we will declare not ready.
    await light_coordinator.async_refresh()

    if not light_coordinator.last_update_success:
        raise PlatformNotReady

    if not supports_groups:
        update_lights_without_group_support = partial(
            async_update_items,
            bridge,
            bridge.api.lights,
            {},
            async_add_entities,
            partial(create_light, HueLight, light_coordinator, bridge, False, rooms),
            None,
        )
        # We add a listener after fetching the data, so manually trigger listener
        bridge.reset_jobs.append(
            light_coordinator.async_add_listener(update_lights_without_group_support)
        )
        return

    group_coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        name="group",
        update_method=partial(async_safe_fetch, bridge, bridge.api.groups.update),
        update_interval=SCAN_INTERVAL,
        request_refresh_debouncer=Debouncer(
            bridge.hass, _LOGGER, cooldown=REQUEST_REFRESH_DELAY, immediate=True
        ),
    )

    if allow_groups:
        update_groups = partial(
            async_update_items,
            bridge,
            bridge.api.groups,
            {},
            async_add_entities,
            partial(create_light, HueLight, group_coordinator, bridge, True, None),
            None,
        )

        bridge.reset_jobs.append(group_coordinator.async_add_listener(update_groups))

    cancel_update_rooms_listener = None

    @callback
    def _async_update_rooms():
        """Update rooms."""
        nonlocal cancel_update_rooms_listener
        rooms.clear()
        for item_id in bridge.api.groups:
            group = bridge.api.groups[item_id]
            if group.type != GROUP_TYPE_ROOM:
                continue
            for light_id in group.lights:
                rooms[light_id] = group.name

        # Once we do a rooms update, we cancel the listener
        # until the next time lights are added
        bridge.reset_jobs.remove(cancel_update_rooms_listener)
        cancel_update_rooms_listener()  # pylint: disable=not-callable
        cancel_update_rooms_listener = None

    @callback
    def _setup_rooms_listener():
        nonlocal cancel_update_rooms_listener
        if cancel_update_rooms_listener is not None:
            # If there are new lights added before _async_update_rooms
            # is called we should not add another listener
            return

        cancel_update_rooms_listener = group_coordinator.async_add_listener(
            _async_update_rooms
        )
        bridge.reset_jobs.append(cancel_update_rooms_listener)

    _setup_rooms_listener()
    await group_coordinator.async_refresh()

    update_lights_with_group_support = partial(
        async_update_items,
        bridge,
        bridge.api.lights,
        {},
        async_add_entities,
        partial(create_light, HueLight, light_coordinator, bridge, False, rooms),
        _setup_rooms_listener,
    )
    # We add a listener after fetching the data, so manually trigger listener
    bridge.reset_jobs.append(
        light_coordinator.async_add_listener(update_lights_with_group_support)
    )
    update_lights_with_group_support()
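# Standalone sketch of the functools.partial currying used throughout
# async_setup_entry above: create_light takes seven arguments, but
# async_update_items only calls its factory with (api, item_id), so the
# first five are pre-bound. The names below are hypothetical, for
# illustration only.
from functools import partial as _partial_demo  # stdlib; aliased to avoid clashes


def _example_factory(item_class, coordinator, bridge, is_group, rooms, api, item_id):
    # Stand-in for building a real entity from the pre-bound context.
    return (item_class, item_id)


# Pre-bind everything except (api, item_id), mirroring the real setup code:
_example_create = _partial_demo(_example_factory, "HueLight", None, None, False, {})
# _example_create({"1": "light"}, "1") -> ("HueLight", "1")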
async def async_safe_fetch(bridge, fetch_method):
    """Safely fetch data."""
    try:
        with async_timeout.timeout(4):
            return await bridge.async_request_call(fetch_method)
    except aiohue.Unauthorized as err:
        await bridge.handle_unauthorized_error()
        raise UpdateFailed("Unauthorized") from err
    except aiohue.AiohueException as err:
        raise UpdateFailed(f"Hue error: {err}") from err


@callback
def async_update_items(
    bridge, api, current, async_add_entities, create_item, new_items_callback
):
    """Update items."""
    new_items = []

    for item_id in api:
        if item_id in current:
            continue
        current[item_id] = create_item(api, item_id)
        new_items.append(current[item_id])

    bridge.hass.async_create_task(remove_devices(bridge, api, current))

    if new_items:
        # This is currently used to setup the listener to update rooms
        if new_items_callback:
            new_items_callback()
        async_add_entities(new_items)


def hue_brightness_to_hass(value):
    """Convert hue brightness 1..254 to hass format 0..255."""
    return min(255, round((value / 254) * 255))


def hass_to_hue_brightness(value):
    """Convert hass brightness 0..255 to hue 1..254 scale."""
    return max(1, round((value / 255) * 254))
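# Worked example for the two scaling helpers above: the bridge speaks
# 1..254, Home Assistant 0..255, so the extremes map onto each other and a
# mid-range value survives a round trip to within one step.
def _example_brightness_roundtrip() -> None:
    assert hue_brightness_to_hass(254) == 255
    assert hass_to_hue_brightness(255) == 254
    assert abs(hass_to_hue_brightness(hue_brightness_to_hass(127)) - 127) <= 1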
class HueLight(CoordinatorEntity, LightEntity):
    """Representation of a Hue light."""

    def __init__(self, coordinator, bridge, is_group, light, supported_features, rooms):
        """Initialize the light."""
        super().__init__(coordinator)
        self.light = light
        self.bridge = bridge
        self.is_group = is_group
        self._supported_features = supported_features
        self._rooms = rooms

        if is_group:
            self.is_osram = False
            self.is_philips = False
            self.is_innr = False
            self.is_ewelink = False
            self.is_livarno = False
            self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
            self.gamut = None
        else:
            self.is_osram = light.manufacturername == "OSRAM"
            self.is_philips = light.manufacturername == "Philips"
            self.is_innr = light.manufacturername == "innr"
            self.is_ewelink = light.manufacturername == "eWeLink"
            self.is_livarno = light.manufacturername.startswith("_TZ3000_")
            self.gamut_typ = self.light.colorgamuttype
            self.gamut = self.light.colorgamut
            _LOGGER.debug("Color gamut of %s: %s", self.name, str(self.gamut))
            if self.light.swupdatestate == "readytoinstall":
                err = (
                    "Please check for software updates of the %s "
                    "bulb in the Philips Hue App."
                )
                _LOGGER.warning(err, self.name)
            if self.gamut and not color.check_valid_gamut(self.gamut):
                err = "Color gamut of %s: %s, not valid, setting gamut to None."
                _LOGGER.debug(err, self.name, str(self.gamut))
                self.gamut_typ = GAMUT_TYPE_UNAVAILABLE
                self.gamut = None

    @property
    def unique_id(self):
        """Return the unique ID of this Hue light."""
        unique_id = self.light.uniqueid
        if not unique_id and self.is_group and self.light.room:
            unique_id = self.light.room["id"]

        return unique_id

    @property
    def device_id(self):
        """Return the ID of this Hue light."""
        return self.unique_id

    @property
    def name(self):
        """Return the name of the Hue light."""
        return self.light.name

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        if self.is_group:
            bri = self.light.action.get("bri")
        else:
            bri = self.light.state.get("bri")

        if bri is None:
            return bri

        return hue_brightness_to_hass(bri)

    @property
    def _color_mode(self):
        """Return the hue color mode."""
        if self.is_group:
            return self.light.action.get("colormode")
        return self.light.state.get("colormode")

    @property
    def hs_color(self):
        """Return the hs color value."""
        mode = self._color_mode
        source = self.light.action if self.is_group else self.light.state

        if mode in ("xy", "hs") and "xy" in source:
            return color.color_xy_to_hs(*source["xy"], self.gamut)

        return None

    @property
    def color_temp(self):
        """Return the CT color value."""
        # Don't return color temperature unless in color temperature mode
        if self._color_mode != "ct":
            return None

        if self.is_group:
            return self.light.action.get("ct")
        return self.light.state.get("ct")

    @property
    def min_mireds(self):
        """Return the coldest color_temp that this light supports."""
        if self.is_group:
            return super().min_mireds

        min_mireds = self.light.controlcapabilities.get("ct", {}).get("min")

        # We filter out '0' too, which can be incorrectly reported by
        # 3rd party bulbs
        if not min_mireds:
            return super().min_mireds

        return min_mireds

    @property
    def max_mireds(self):
        """Return the warmest color_temp that this light supports."""
        if self.is_group:
            return super().max_mireds
        if self.is_livarno:
            return 500

        max_mireds = self.light.controlcapabilities.get("ct", {}).get("max")

        if not max_mireds:
            return super().max_mireds

        return max_mireds

    @property
    def is_on(self):
        """Return true if device is on."""
        if self.is_group:
            return self.light.state["any_on"]
        return self.light.state["on"]

    @property
    def available(self):
        """Return if light is available."""
        return self.coordinator.last_update_success and (
            self.is_group
            or self.bridge.allow_unreachable
            or self.light.state["reachable"]
        )

    @property
    def supported_features(self):
        """Flag supported features."""
        return self._supported_features

    @property
    def effect(self):
        """Return the current effect."""
        return self.light.state.get("effect", None)

    @property
    def effect_list(self):
        """Return the list of supported effects."""
        if self.is_osram:
            return [EFFECT_RANDOM]
        return [EFFECT_COLORLOOP, EFFECT_RANDOM]

    @property
    def device_info(self) -> DeviceInfo | None:
        """Return the device info."""
        if self.light.type in (
            GROUP_TYPE_LIGHT_GROUP,
            GROUP_TYPE_ROOM,
            GROUP_TYPE_LUMINAIRE,
            GROUP_TYPE_LIGHT_SOURCE,
        ):
            return None

        suggested_area = None
        if self.light.id in self._rooms:
            suggested_area = self._rooms[self.light.id]

        return DeviceInfo(
            identifiers={(HUE_DOMAIN, self.device_id)},
            manufacturer=self.light.manufacturername,
            # productname added in Hue Bridge API 1.24
            # (published 03/05/2018)
            model=self.light.productname or self.light.modelid,
            name=self.name,
            # Not yet exposed as properties in aiohue
            suggested_area=suggested_area,
            sw_version=self.light.raw["swversion"],
            via_device=(HUE_DOMAIN, self.bridge.api.config.bridgeid),
        )

    async def async_added_to_hass(self) -> None:
        """Handle entity being added to Home Assistant."""
        self.async_on_remove(
            self.bridge.listen_updates(
                self.light.ITEM_TYPE, self.light.id, self.async_write_ha_state
            )
        )
        await super().async_added_to_hass()

    async def async_turn_on(self, **kwargs):
        """Turn the specified or all lights on."""
        command = {"on": True}

        if ATTR_TRANSITION in kwargs:
            command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)

        if ATTR_HS_COLOR in kwargs:
            if self.is_osram:
                command["hue"] = int(kwargs[ATTR_HS_COLOR][0] / 360 * 65535)
                command["sat"] = int(kwargs[ATTR_HS_COLOR][1] / 100 * 255)
            else:
                # Philips hue bulb models respond differently to hue/sat
                # requests, so we convert to XY first to ensure a consistent
                # color.
                xy_color = color.color_hs_to_xy(*kwargs[ATTR_HS_COLOR], self.gamut)
                command["xy"] = xy_color
        elif ATTR_COLOR_TEMP in kwargs:
            temp = kwargs[ATTR_COLOR_TEMP]
            command["ct"] = max(self.min_mireds, min(temp, self.max_mireds))

        if ATTR_BRIGHTNESS in kwargs:
            command["bri"] = hass_to_hue_brightness(kwargs[ATTR_BRIGHTNESS])

        flash = kwargs.get(ATTR_FLASH)

        if flash == FLASH_LONG:
            command["alert"] = "lselect"
            del command["on"]
        elif flash == FLASH_SHORT:
            command["alert"] = "select"
            del command["on"]
        elif not self.is_innr and not self.is_ewelink and not self.is_livarno:
            command["alert"] = "none"

        if ATTR_EFFECT in kwargs:
            effect = kwargs[ATTR_EFFECT]
            if effect == EFFECT_COLORLOOP:
                command["effect"] = "colorloop"
            elif effect == EFFECT_RANDOM:
                command["hue"] = random.randrange(0, 65535)
                command["sat"] = random.randrange(150, 254)
            else:
                command["effect"] = "none"

        if self.is_group:
            await self.bridge.async_request_call(
                partial(self.light.set_action, **command)
            )
        else:
            await self.bridge.async_request_call(
                partial(self.light.set_state, **command)
            )

        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs):
        """Turn the specified or all lights off."""
        command = {"on": False}

        if ATTR_TRANSITION in kwargs:
            command["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)

        flash = kwargs.get(ATTR_FLASH)

        if flash == FLASH_LONG:
            command["alert"] = "lselect"
            del command["on"]
        elif flash == FLASH_SHORT:
            command["alert"] = "select"
            del command["on"]
        elif not self.is_innr and not self.is_livarno:
            command["alert"] = "none"

        if self.is_group:
            await self.bridge.async_request_call(
                partial(self.light.set_action, **command)
            )
        else:
            await self.bridge.async_request_call(
                partial(self.light.set_state, **command)
            )

        await self.coordinator.async_request_refresh()

    @property
    def extra_state_attributes(self):
        """Return the device state attributes."""
        if not self.is_group:
            return {}
        return {ATTR_IS_HUE_GROUP: self.is_group}
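# Standalone sketch of the payload shape async_turn_on above sends to the
# bridge for a plain brightness-plus-transition call. The helper name is
# hypothetical; the scaling mirrors hass_to_hue_brightness and the 100 ms
# transition units used by the Hue API.
def _example_turn_on_command(transition_s: float, brightness_255: int) -> dict:
    return {
        "on": True,
        "transitiontime": int(transition_s * 10),  # Hue counts 100 ms steps
        "bri": max(1, round((brightness_255 / 255) * 254)),
        "alert": "none",
    }


# _example_turn_on_command(4.0, 255)
# -> {"on": True, "transitiontime": 40, "bri": 254, "alert": "none"}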
# Copyright (c) 2018-2021 <NAME>
# License: MIT License
# source: http://www.lee-mac.com/bulgeconversion.html
# source: http://www.afralisp.net/archive/lisp/Bulges1.htm
from typing import Any, TYPE_CHECKING, Tuple
import math

from ezdxf.math import Vec2

if TYPE_CHECKING:
    from ezdxf.eztypes import Vertex

__all__ = [
    "bulge_to_arc", "bulge_3_points", "bulge_center", "bulge_radius",
    "arc_to_bulge"
]


def polar(p: Any, angle: float, distance: float) -> Vec2:
    """ Returns the point at a specified `angle` and `distance` from point `p`.

    Args:
        p: point as :class:`Vec2` compatible object
        angle: angle in radians
        distance: distance

    """
    return Vec2(p) + Vec2.from_angle(angle, distance)


def angle(p1: Any, p2: Any) -> float:
    """ Returns the angle of a line defined by two endpoints, measured
    against the x-axis in radians.

    Args:
        p1: start point as :class:`Vec2` compatible object
        p2: end point as :class:`Vec2` compatible object

    """
    return (Vec2(p2) - Vec2(p1)).angle


def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle: float,
                 radius: float) -> Tuple['Vec2', 'Vec2', float]:
    """ Returns bulge parameters from arc parameters.

    Args:
        center: circle center point as :class:`Vec2` compatible object
        start_angle: start angle in radians
        end_angle: end angle in radians
        radius: circle radius

    Returns:
        tuple: (start_point, end_point, bulge)

    """
    start_point = polar(center, start_angle, radius)
    end_point = polar(center, end_angle, radius)
    pi2 = math.pi * 2
    a = math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4.
    bulge = math.sin(a) / math.cos(a)
    return start_point, end_point, bulge


def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex',
                   point: 'Vertex') -> float:
    """ Returns bulge value defined by three points.

    Based on 3-Points to Bulge by `Lee Mac`_.

    Args:
        start_point: start point as :class:`Vec2` compatible object
        end_point: end point as :class:`Vec2` compatible object
        point: arbitrary point as :class:`Vec2` compatible object

    """
    a = (math.pi - angle(point, start_point) + angle(point, end_point)) / 2
    return math.sin(a) / math.cos(a)
+ (bulge *", "TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ = [ \"bulge_to_arc\", \"bulge_3_points\",", "end_point) + (math.pi / 2 - math.atan(bulge) * 2) c", "'Vertex', bulge: float) -> float: \"\"\" Returns radius of arc", "vertex as :class:`Vec2` compatible object bulge: bulge value Returns: Tuple:", "from ezdxf.math import Vec2 if TYPE_CHECKING: from ezdxf.eztypes import Vertex", "# Copyright (c) 2018-2021 <NAME> # License: MIT License #", "License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from", "License # source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import", "object \"\"\" return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle:", "'Vertex') -> float: \"\"\" Returns bulge value defined by three", "Radius by `<NAME>`_ Args: start_point: start point as :class:`Vec2` compatible", "in radians distance: distance \"\"\" return Vec2(p) + Vec2.from_angle(angle, distance)", "abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2':", "Based on 3-Points to Bulge by `Lee Mac`_. Args: start_point:", "by `Lee Mac`_. Args: start_point: start vertex as :class:`Vec2` compatible", "`angle` and `distance` from point `p`. Args: p: point as", "compatible object bulge: bulge value \"\"\" return abs(signed_bulge_radius(start_point, end_point, bulge))", "import Vec2 if TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ =", ":class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible object", "float: \"\"\" Returns radius of arc defined by the given", "as :class:`Vec2` compatible object bulge: bulge value Returns: Tuple: (center,", "center of arc described by the given bulge parameters. Based", "`Lee Mac`_. Args: start_point: start vertex as :class:`Vec2` compatible object", "/ 4. bulge = math.sin(a) / math.cos(a) return start_point, end_point,", "Returns: Tuple: (center, start_angle, end_angle, radius) \"\"\" r = signed_bulge_radius(start_point,", "import math from ezdxf.math import Vec2 if TYPE_CHECKING: from ezdxf.eztypes", "on Bulge Radius by `<NAME>`_ Args: start_point: start point as", "'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2': \"\"\" Returns center", "math.cos(a) return start_point, end_point, bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex',", "value Returns: Tuple: (center, start_angle, end_angle, radius) \"\"\" r =", "float) -> Vec2: \"\"\" Returns the point at a specified", "= signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point) + (math.pi", "bulge < 0: return c, angle(c, end_point), angle(c, start_point), abs(r)", "tuple: (start_point, end_point, bulge) \"\"\" start_point = polar(center, start_angle, radius)", "= math.sin(a) / math.cos(a) return start_point, end_point, bulge def bulge_3_points(start_point:", "Copyright (c) 2018-2021 <NAME> # License: MIT License # source:", "2 a = math.fmod((pi2 + (end_angle - start_angle)), pi2) /", "p2: end point as :class:`Vec2` compatible object \"\"\" return (Vec2(p2)", "compatible object point: arbitrary point as :class:`Vec2` compatible object \"\"\"", ":class:`Vec2` compatible object start_angle: start angle in radians end_angle: end", "* ( 1. + (bulge * bulge)) / 4. 
/", "- angle(point, start_point) + angle(point, end_point)) / 2 return math.sin(a)", "angle in radians radius: circle radius Returns: tuple: (start_point, end_point,", "angle(point, start_point) + angle(point, end_point)) / 2 return math.sin(a) /", "The arcs defined by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D", "of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at the vertex", "arc parameters. Args: center: circle center point as :class:`Vec2` compatible", "endpoints and x-axis in radians. Args: p1: start point as", "def polar(p: Any, angle: float, distance: float) -> Vec2: \"\"\"", "end_angle, radius) pi2 = math.pi * 2 a = math.fmod((pi2", "end_angle: float, radius: float) -> Tuple['Vec2', 'Vec2', float]: \"\"\" Returns", "Arc by `Lee Mac`_. Args: start_point: start vertex as :class:`Vec2`", "\"\"\" Returns angle a line defined by two endpoints and", "c, angle(c, end_point), angle(c, start_point), abs(r) else: return c, angle(c,", "float: \"\"\" Returns bulge value defined by three points. Based", "start_point, end_point, bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point: 'Vertex')", "start angle in radians end_angle: end angle in radians radius:", "angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float)", "point at a specified `angle` and `distance` from point `p`.", "point as :class:`Vec2` compatible object bulge: bulge value as float", "= angle(start_point, end_point) + (math.pi / 2. - math.atan(bulge) *", "bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point)) * ( 1. +", "+ (bulge * bulge)) / 4. / bulge def bulge_radius(start_point:", "polar(center, start_angle, radius) end_point = polar(center, end_angle, radius) pi2 =", "the vertex which includes the bulge value and ends at", "bulge value defined by three points. Based on 3-Points to", "\"\"\" Returns arc parameters from bulge parameters. The arcs defined", "(math.pi / 2. - math.atan(bulge) * 2.) return start_point +", "a, r) if bulge < 0: return c, angle(c, end_point),", "def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: \"\"\"", "Any) -> float: \"\"\" Returns angle a line defined by", "bulge: float) -> 'Vec2': \"\"\" Returns center of arc described", "start_point: start vertex as :class:`Vec2` compatible object end_point: end vertex", "math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float)", "math.pi * 2 a = math.fmod((pi2 + (end_angle - start_angle)),", "start_angle, end_angle, radius) \"\"\" r = signed_bulge_radius(start_point, end_point, bulge) a", "\"\"\" Returns bulge value defined by three points. Based on", "given bulge parameters. Based on Bulge Radius by `<NAME>`_ Args:", "angle: float, distance: float) -> Vec2: \"\"\" Returns the point", "(c) 2018-2021 <NAME> # License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html", "(start_point, end_point, bulge) \"\"\" start_point = polar(center, start_angle, radius) end_point", "end_point) + (math.pi / 2. - math.atan(bulge) * 2.) return", "4. 
/ bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float)", "2018-2021 <NAME> # License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html #", "'Vertex', end_point: 'Vertex', bulge: float) -> float: \"\"\" Returns radius", "def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float,", "compatible object \"\"\" return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex',", "return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge:", ":class:`Vec2` compatible object end_point: end vertex as :class:`Vec2` compatible object", "start_point = polar(center, start_angle, radius) end_point = polar(center, end_angle, radius)", "end point as :class:`Vec2` compatible object \"\"\" return (Vec2(p2) -", "to Bulge by `Lee Mac`_. Args: start_point: start point as", "angle(start_point, end_point) + (math.pi / 2 - math.atan(bulge) * 2)", "def angle(p1: Any, p2: Any) -> float: \"\"\" Returns angle", "bulge) \"\"\" start_point = polar(center, start_angle, radius) end_point = polar(center,", "to Arc by `Lee Mac`_. Args: start_point: start vertex as", "/ 2 - math.atan(bulge) * 2) c = polar(start_point, a,", "object point: arbitrary point as :class:`Vec2` compatible object \"\"\" a", "object p2: end point as :class:`Vec2` compatible object \"\"\" return", "return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex',", "parameters from arc parameters. Args: center: circle center point as", "Returns: tuple: (start_point, end_point, bulge) \"\"\" start_point = polar(center, start_angle,", "bulge value and ends at the following vertex. Based on", "the given bulge parameters. Based on Bulge Radius by `<NAME>`_", "= math.pi * 2 a = math.fmod((pi2 + (end_angle -", "bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float:", "bulge = math.sin(a) / math.cos(a) return start_point, end_point, bulge def", "line defined by two endpoints and x-axis in radians. Args:", "compatible object start_angle: start angle in radians end_angle: end angle", "end_point, bulge) a = angle(start_point, end_point) + (math.pi / 2", "in radians. Args: p1: start point as :class:`Vec2` compatible object", "pi2) / 4. bulge = math.sin(a) / math.cos(a) return start_point,", "\"\"\" start_point = polar(center, start_angle, radius) end_point = polar(center, end_angle,", "return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any)", "by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start", "Bulge Radius by `<NAME>`_ Args: start_point: start point as :class:`Vec2`", "radius) pi2 = math.pi * 2 a = math.fmod((pi2 +", "-> Tuple['Vec2', float, float, float]: \"\"\" Returns arc parameters from", "+ angle(point, end_point)) / 2 return math.sin(a) / math.cos(a) def", "the following vertex. Based on Bulge to Arc by `Lee", "arc described by the given bulge parameters. Based on Bulge", "float: return Vec2(start_point).distance(Vec2(end_point)) * ( 1. 
+ (bulge * bulge))", "\"\"\" start_point = Vec2(start_point) a = angle(start_point, end_point) + (math.pi", "polar(start_point, a, r) if bulge < 0: return c, angle(c,", "distance: distance \"\"\" return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1:", "bulge: bulge value as float \"\"\" start_point = Vec2(start_point) a", "0: return c, angle(c, end_point), angle(c, start_point), abs(r) else: return", "end_point: end point as :class:`Vec2` compatible object point: arbitrary point", ":class:`Vec2` compatible object bulge: bulge value \"\"\" return abs(signed_bulge_radius(start_point, end_point,", "\"\"\" Returns bulge parameters from arc parameters. Args: center: circle", "by three points. Based on 3-Points to Bulge by `Lee", "the bulge value and ends at the following vertex. Based", "as :class:`Vec2` compatible object p2: end point as :class:`Vec2` compatible", "\"\"\" a = (math.pi - angle(point, start_point) + angle(point, end_point))", "angle in radians end_angle: end angle in radians radius: circle", "Args: start_point: start point as :class:`Vec2` compatible object end_point: end", "bulge parameters. Based on Bulge Radius by `<NAME>`_ Args: start_point:", "p1: start point as :class:`Vec2` compatible object p2: end point", "bulge parameters from arc parameters. Args: center: circle center point", "'Vertex', end_point: 'Vertex', bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point)) *", "as :class:`Vec2` compatible object bulge: bulge value \"\"\" return abs(signed_bulge_radius(start_point,", "bulge parameters. Based on Bulge Center by `<NAME>`_. Args: start_point:", "in radians radius: circle radius Returns: tuple: (start_point, end_point, bulge)", "bulge)) / 4. / bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex',", "bulge: bulge value Returns: Tuple: (center, start_angle, end_angle, radius) \"\"\"", "end_point: 'Vertex', point: 'Vertex') -> float: \"\"\" Returns bulge value", "defined by three points. Based on 3-Points to Bulge by", "and x-axis in radians. Args: p1: start point as :class:`Vec2`", "import Any, TYPE_CHECKING, Tuple import math from ezdxf.math import Vec2", "start_point), angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge:", "and ends at the following vertex. Based on Bulge to", "/ 4. / bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge:", "object start_angle: start angle in radians end_angle: end angle in", "angle: angle in radians distance: distance \"\"\" return Vec2(p) +", "end_point = polar(center, end_angle, radius) pi2 = math.pi * 2", "by `<NAME>`_. Args: start_point: start point as :class:`Vec2` compatible object", "two endpoints and x-axis in radians. Args: p1: start point", "circle radius Returns: tuple: (start_point, end_point, bulge) \"\"\" start_point =", "arc parameters from bulge parameters. 
The arcs defined by bulge", "compatible object p2: end point as :class:`Vec2` compatible object \"\"\"", "values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at the", "end_point, bulge) \"\"\" start_point = polar(center, start_angle, radius) end_point =", "'Vertex', bulge: float) -> Tuple['Vec2', float, float, float]: \"\"\" Returns", "in radians end_angle: end angle in radians radius: circle radius", "angle(c, start_point), angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex',", "arcs defined by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline`", "`<NAME>`_ Args: start_point: start point as :class:`Vec2` compatible object end_point:", "three points. Based on 3-Points to Bulge by `Lee Mac`_.", "defined by two endpoints and x-axis in radians. Args: p1:", "else: return c, angle(c, start_point), angle(c, end_point), abs(r) def bulge_center(start_point:", "`distance` from point `p`. Args: p: point as :class:`Vec2` compatible", "- math.atan(bulge) * 2) c = polar(start_point, a, r) if", "(center, start_angle, end_angle, radius) \"\"\" r = signed_bulge_radius(start_point, end_point, bulge)", "= polar(center, start_angle, radius) end_point = polar(center, end_angle, radius) pi2", "1. + (bulge * bulge)) / 4. / bulge def", "`<NAME>`_. Args: start_point: start point as :class:`Vec2` compatible object end_point:", "http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any, TYPE_CHECKING, Tuple", "return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle:", "\"\"\" return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any, p2:", "bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at", "= Vec2(start_point) a = angle(start_point, end_point) + (math.pi / 2.", "math from ezdxf.math import Vec2 if TYPE_CHECKING: from ezdxf.eztypes import", "\"\"\" Returns center of arc described by the given bulge", "-> float: \"\"\" Returns radius of arc defined by the", "radians radius: circle radius Returns: tuple: (start_point, end_point, bulge) \"\"\"", ":class:`Vec2` compatible object \"\"\" return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center:", "end vertex as :class:`Vec2` compatible object bulge: bulge value Returns:", "points. Based on 3-Points to Bulge by `Lee Mac`_. Args:", "vertex which includes the bulge value and ends at the", "'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float, float, float]:", "polar(center, end_angle, radius) pi2 = math.pi * 2 a =", "at the vertex which includes the bulge value and ends", "point `p`. Args: p: point as :class:`Vec2` compatible object angle:", "on Bulge to Arc by `Lee Mac`_. Args: start_point: start", "center point as :class:`Vec2` compatible object start_angle: start angle in", "- math.atan(bulge) * 2.) return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point,", "which includes the bulge value and ends at the following", "given bulge parameters. Based on Bulge Center by `<NAME>`_. 
Args:", "Vec2 if TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ = [", "end_point)) / 2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex',", "end_angle, radius) \"\"\" r = signed_bulge_radius(start_point, end_point, bulge) a =", "start_angle, radius) end_point = polar(center, end_angle, radius) pi2 = math.pi", "Args: center: circle center point as :class:`Vec2` compatible object start_angle:", "as :class:`Vec2` compatible object end_point: end vertex as :class:`Vec2` compatible", "from typing import Any, TYPE_CHECKING, Tuple import math from ezdxf.math", "polar(p: Any, angle: float, distance: float) -> Vec2: \"\"\" Returns", "object bulge: bulge value Returns: Tuple: (center, start_angle, end_angle, radius)", "/ 2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point:", "Tuple: (center, start_angle, end_angle, radius) \"\"\" r = signed_bulge_radius(start_point, end_point,", "def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: return", "'Vertex', bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point)) * ( 1.", "from ezdxf.eztypes import Vertex __all__ = [ \"bulge_to_arc\", \"bulge_3_points\", \"bulge_center\",", "parameters. Based on Bulge Radius by `<NAME>`_ Args: start_point: start", "point as :class:`Vec2` compatible object angle: angle in radians distance:", "as :class:`Vec2` compatible object bulge: bulge value as float \"\"\"", "a = math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4.", "-> 'Vec2': \"\"\" Returns center of arc described by the", "'Vertex', bulge: float) -> 'Vec2': \"\"\" Returns center of arc", "the point at a specified `angle` and `distance` from point", "typing import Any, TYPE_CHECKING, Tuple import math from ezdxf.math import", "Returns the point at a specified `angle` and `distance` from", "end_point), angle(c, start_point), abs(r) else: return c, angle(c, start_point), angle(c,", "as :class:`Vec2` compatible object angle: angle in radians distance: distance", "'Vertex', point: 'Vertex') -> float: \"\"\" Returns bulge value defined", "a = angle(start_point, end_point) + (math.pi / 2. - math.atan(bulge)", "circle center point as :class:`Vec2` compatible object start_angle: start angle", ":class:`~ezdxf.entities.Polyline` entities start at the vertex which includes the bulge", "\"\"\" return (Vec2(p2) - Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle: float,", "on Bulge Center by `<NAME>`_. Args: start_point: start point as", "radians. Args: p1: start point as :class:`Vec2` compatible object p2:", "`Lee Mac`_. 
Args: start_point: start point as :class:`Vec2` compatible object", "radius) end_point = polar(center, end_angle, radius) pi2 = math.pi *", "+ (math.pi / 2 - math.atan(bulge) * 2) c =", "point as :class:`Vec2` compatible object \"\"\" a = (math.pi -", "<gh_stars>0 # Copyright (c) 2018-2021 <NAME> # License: MIT License", "return c, angle(c, end_point), angle(c, start_point), abs(r) else: return c,", "if bulge < 0: return c, angle(c, end_point), angle(c, start_point),", "math.atan(bulge) * 2) c = polar(start_point, a, r) if bulge", "radians end_angle: end angle in radians radius: circle radius Returns:", "def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2': \"\"\"", "float, end_angle: float, radius: float) -> Tuple['Vec2', 'Vec2', float]: \"\"\"", "as :class:`Vec2` compatible object \"\"\" return (Vec2(p2) - Vec2(p1)).angle def", "compatible object angle: angle in radians distance: distance \"\"\" return", "end point as :class:`Vec2` compatible object point: arbitrary point as", "object end_point: end vertex as :class:`Vec2` compatible object bulge: bulge", "Returns center of arc described by the given bulge parameters.", "arc defined by the given bulge parameters. Based on Bulge", "bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> 'Vec2': \"\"\" Returns", "math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4. bulge =", "a = (math.pi - angle(point, start_point) + angle(point, end_point)) /", "start_angle: float, end_angle: float, radius: float) -> Tuple['Vec2', 'Vec2', float]:", "* 2 a = math.fmod((pi2 + (end_angle - start_angle)), pi2)", "object bulge: bulge value as float \"\"\" start_point = Vec2(start_point)", "c = polar(start_point, a, r) if bulge < 0: return", "+ Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex',", "return start_point, end_point, bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point:", "the given bulge parameters. Based on Bulge Center by `<NAME>`_.", "= polar(center, end_angle, radius) pi2 = math.pi * 2 a", "'Vec2': \"\"\" Returns center of arc described by the given", "by two endpoints and x-axis in radians. Args: p1: start", "float, radius: float) -> Tuple['Vec2', 'Vec2', float]: \"\"\" Returns bulge", "'Vertex', end_point: 'Vertex', point: 'Vertex') -> float: \"\"\" Returns bulge", "Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any) ->", "following vertex. Based on Bulge to Arc by `Lee Mac`_.", "compatible object end_point: end point as :class:`Vec2` compatible object point:", "(end_angle - start_angle)), pi2) / 4. bulge = math.sin(a) /", "Bulge Center by `<NAME>`_. Args: start_point: start point as :class:`Vec2`", "radius: circle radius Returns: tuple: (start_point, end_point, bulge) \"\"\" start_point", "TYPE_CHECKING, Tuple import math from ezdxf.math import Vec2 if TYPE_CHECKING:", "Vec2(start_point) a = angle(start_point, end_point) + (math.pi / 2. -", "Based on Bulge to Arc by `Lee Mac`_. Args: start_point:", "\"\"\" r = signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point)", "end_point: end vertex as :class:`Vec2` compatible object bulge: bulge value", "Center by `<NAME>`_. Args: start_point: start point as :class:`Vec2` compatible", ":class:`Vec2` compatible object p2: end point as :class:`Vec2` compatible object", "2) c = polar(start_point, a, r) if bulge < 0:", "/ 2. - math.atan(bulge) * 2.) 
return start_point + Vec2.from_angle(a,", "float]: \"\"\" Returns arc parameters from bulge parameters. The arcs", "signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float)", "a = angle(start_point, end_point) + (math.pi / 2 - math.atan(bulge)", "start_point: start point as :class:`Vec2` compatible object end_point: end point", "point as :class:`Vec2` compatible object p2: end point as :class:`Vec2`", "a line defined by two endpoints and x-axis in radians.", "<NAME> # License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html # source:", "* 2.) return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def", "Returns angle a line defined by two endpoints and x-axis", "float \"\"\" start_point = Vec2(start_point) a = angle(start_point, end_point) +", ":class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities start at the vertex which", "float: \"\"\" Returns angle a line defined by two endpoints", "start point as :class:`Vec2` compatible object end_point: end point as", "angle in radians distance: distance \"\"\" return Vec2(p) + Vec2.from_angle(angle,", "bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float:", "(bulge * bulge)) / 4. / bulge def bulge_radius(start_point: 'Vertex',", "(math.pi / 2 - math.atan(bulge) * 2) c = polar(start_point,", "x-axis in radians. Args: p1: start point as :class:`Vec2` compatible", "by the given bulge parameters. Based on Bulge Radius by", "from arc parameters. Args: center: circle center point as :class:`Vec2`", "object end_point: end point as :class:`Vec2` compatible object bulge: bulge", "and `distance` from point `p`. Args: p: point as :class:`Vec2`", "end_point: 'Vertex', bulge: float) -> float: \"\"\" Returns radius of", "angle(point, end_point)) / 2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point:", "Args: p: point as :class:`Vec2` compatible object angle: angle in", "object end_point: end point as :class:`Vec2` compatible object point: arbitrary", "compatible object \"\"\" a = (math.pi - angle(point, start_point) +", "angle(p1: Any, p2: Any) -> float: \"\"\" Returns angle a", "2.) return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point:", "source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any, TYPE_CHECKING, Tuple import math", "[ \"bulge_to_arc\", \"bulge_3_points\", \"bulge_center\", \"bulge_radius\", \"arc_to_bulge\" ] def polar(p: Any,", "/ bulge def bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) ->", ":class:`Vec2` compatible object angle: angle in radians distance: distance \"\"\"", "end_point: 'Vertex', bulge: float) -> 'Vec2': \"\"\" Returns center of", "radians distance: distance \"\"\" return Vec2(p) + Vec2.from_angle(angle, distance) def", "ends at the following vertex. 
Based on Bulge to Arc", "float, distance: float) -> Vec2: \"\"\" Returns the point at", "# License: MIT License # source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm", "\"bulge_center\", \"bulge_radius\", \"arc_to_bulge\" ] def polar(p: Any, angle: float, distance:", "< 0: return c, angle(c, end_point), angle(c, start_point), abs(r) else:", "bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float, float,", "center: circle center point as :class:`Vec2` compatible object start_angle: start", "abs(r) else: return c, angle(c, start_point), angle(c, end_point), abs(r) def", "angle(start_point, end_point) + (math.pi / 2. - math.atan(bulge) * 2.)", "point as :class:`Vec2` compatible object \"\"\" return (Vec2(p2) - Vec2(p1)).angle", "Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any) -> float: \"\"\"", "at a specified `angle` and `distance` from point `p`. Args:", "distance: float) -> Vec2: \"\"\" Returns the point at a", "def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle: float, radius: float) ->", "by `Lee Mac`_. Args: start_point: start point as :class:`Vec2` compatible", "value defined by three points. Based on 3-Points to Bulge", "angle(c, end_point), angle(c, start_point), abs(r) else: return c, angle(c, start_point),", "Returns arc parameters from bulge parameters. The arcs defined by", "end_point: 'Vertex', bulge: float) -> Tuple['Vec2', float, float, float]: \"\"\"", "- start_angle)), pi2) / 4. bulge = math.sin(a) / math.cos(a)", "Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge:", "`p`. Args: p: point as :class:`Vec2` compatible object angle: angle", "+ (math.pi / 2. - math.atan(bulge) * 2.) return start_point", "math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> Tuple['Vec2',", "distance) def angle(p1: Any, p2: Any) -> float: \"\"\" Returns", "bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point: 'Vertex') -> float: \"\"\" Returns", "-> float: \"\"\" Returns angle a line defined by two", "return c, angle(c, start_point), angle(c, end_point), abs(r) def bulge_center(start_point: 'Vertex',", "float) -> Tuple['Vec2', 'Vec2', float]: \"\"\" Returns bulge parameters from", "Any, TYPE_CHECKING, Tuple import math from ezdxf.math import Vec2 if", "end_point: 'Vertex', bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point)) * (", "distance \"\"\" return Vec2(p) + Vec2.from_angle(angle, distance) def angle(p1: Any,", "start_point = Vec2(start_point) a = angle(start_point, end_point) + (math.pi /", "value and ends at the following vertex. Based on Bulge", "+ Vec2.from_angle(angle, distance) def angle(p1: Any, p2: Any) -> float:", ":class:`Vec2` compatible object \"\"\" a = (math.pi - angle(point, start_point)", "= math.fmod((pi2 + (end_angle - start_angle)), pi2) / 4. bulge", ":class:`Vec2` compatible object point: arbitrary point as :class:`Vec2` compatible object", "parameters. Args: center: circle center point as :class:`Vec2` compatible object", "Any, p2: Any) -> float: \"\"\" Returns angle a line", "by the given bulge parameters. 
Based on Bulge Center by", "as float \"\"\" start_point = Vec2(start_point) a = angle(start_point, end_point)", "-> float: \"\"\" Returns bulge value defined by three points.", "Vec2(p1)).angle def arc_to_bulge(center: 'Vertex', start_angle: float, end_angle: float, radius: float)", "= (math.pi - angle(point, start_point) + angle(point, end_point)) / 2", "of arc defined by the given bulge parameters. Based on", "signed_bulge_radius(start_point: 'Vertex', end_point: 'Vertex', bulge: float) -> float: return Vec2(start_point).distance(Vec2(end_point))", "Tuple import math from ezdxf.math import Vec2 if TYPE_CHECKING: from", "specified `angle` and `distance` from point `p`. Args: p: point", "as :class:`Vec2` compatible object start_angle: start angle in radians end_angle:", "bulge: float) -> Tuple['Vec2', float, float, float]: \"\"\" Returns arc", "as :class:`Vec2` compatible object point: arbitrary point as :class:`Vec2` compatible", "object angle: angle in radians distance: distance \"\"\" return Vec2(p)", "p: point as :class:`Vec2` compatible object angle: angle in radians", "vertex as :class:`Vec2` compatible object end_point: end vertex as :class:`Vec2`", "Tuple['Vec2', 'Vec2', float]: \"\"\" Returns bulge parameters from arc parameters.", "on 3-Points to Bulge by `Lee Mac`_. Args: start_point: start", "r) if bulge < 0: return c, angle(c, end_point), angle(c,", "] def polar(p: Any, angle: float, distance: float) -> Vec2:", "start vertex as :class:`Vec2` compatible object end_point: end vertex as", "start_angle)), pi2) / 4. bulge = math.sin(a) / math.cos(a) return", "http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any, TYPE_CHECKING, Tuple import math from", "math.atan(bulge) * 2.) return start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge))", "= polar(start_point, a, r) if bulge < 0: return c,", "bulge parameters. The arcs defined by bulge values of :class:`~ezdxf.entities.LWPolyline`", "__all__ = [ \"bulge_to_arc\", \"bulge_3_points\", \"bulge_center\", \"bulge_radius\", \"arc_to_bulge\" ] def", "a specified `angle` and `distance` from point `p`. Args: p:", "if TYPE_CHECKING: from ezdxf.eztypes import Vertex __all__ = [ \"bulge_to_arc\",", "start_point) + angle(point, end_point)) / 2 return math.sin(a) / math.cos(a)", "float) -> float: return Vec2(start_point).distance(Vec2(end_point)) * ( 1. + (bulge", "radius of arc defined by the given bulge parameters. Based", "signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point) + (math.pi /", "at the following vertex. Based on Bulge to Arc by", "end_point: end point as :class:`Vec2` compatible object bulge: bulge value", ":class:`Vec2` compatible object bulge: bulge value as float \"\"\" start_point", "Args: start_point: start vertex as :class:`Vec2` compatible object end_point: end", "vertex. Based on Bulge to Arc by `Lee Mac`_. Args:", "2 - math.atan(bulge) * 2) c = polar(start_point, a, r)", "Bulge to Arc by `Lee Mac`_. 
Args: start_point: start vertex", "start_point + Vec2.from_angle(a, signed_bulge_radius(start_point, end_point, bulge)) def signed_bulge_radius(start_point: 'Vertex', end_point:", "r = signed_bulge_radius(start_point, end_point, bulge) a = angle(start_point, end_point) +", "# source: http://www.lee-mac.com/bulgeconversion.html # source: http://www.afralisp.net/archive/lisp/Bulges1.htm from typing import Any,", "point as :class:`Vec2` compatible object point: arbitrary point as :class:`Vec2`", "angle(c, start_point), abs(r) else: return c, angle(c, start_point), angle(c, end_point),", "import Vertex __all__ = [ \"bulge_to_arc\", \"bulge_3_points\", \"bulge_center\", \"bulge_radius\", \"arc_to_bulge\"", "bulge value as float \"\"\" start_point = Vec2(start_point) a =", "'Vec2', float]: \"\"\" Returns bulge parameters from arc parameters. Args:", "compatible object end_point: end point as :class:`Vec2` compatible object bulge:", "(math.pi - angle(point, start_point) + angle(point, end_point)) / 2 return", "* bulge)) / 4. / bulge def bulge_radius(start_point: 'Vertex', end_point:", "defined by bulge values of :class:`~ezdxf.entities.LWPolyline` and 2D :class:`~ezdxf.entities.Polyline` entities", "float) -> float: \"\"\" Returns radius of arc defined by", "Returns radius of arc defined by the given bulge parameters.", "3-Points to Bulge by `Lee Mac`_. Args: start_point: start point", "bulge value Returns: Tuple: (center, start_angle, end_angle, radius) \"\"\" r", "from bulge parameters. The arcs defined by bulge values of", "-> Tuple['Vec2', 'Vec2', float]: \"\"\" Returns bulge parameters from arc", "end_point), abs(r) def bulge_center(start_point: 'Vertex', end_point: 'Vertex', bulge: float) ->", "Mac`_. Args: start_point: start vertex as :class:`Vec2` compatible object end_point:", "end angle in radians radius: circle radius Returns: tuple: (start_point,", "2 return math.sin(a) / math.cos(a) def bulge_to_arc(start_point: 'Vertex', end_point: 'Vertex',", "parameters from bulge parameters. The arcs defined by bulge values", "\"bulge_to_arc\", \"bulge_3_points\", \"bulge_center\", \"bulge_radius\", \"arc_to_bulge\" ] def polar(p: Any, angle:", "start point as :class:`Vec2` compatible object p2: end point as", "object \"\"\" a = (math.pi - angle(point, start_point) + angle(point,", "bulge def bulge_3_points(start_point: 'Vertex', end_point: 'Vertex', point: 'Vertex') -> float:", "* 2) c = polar(start_point, a, r) if bulge <", "float) -> Tuple['Vec2', float, float, float]: \"\"\" Returns arc parameters", "end point as :class:`Vec2` compatible object bulge: bulge value \"\"\"", "from point `p`. Args: p: point as :class:`Vec2` compatible object", "float, float, float]: \"\"\" Returns arc parameters from bulge parameters.", "compatible object bulge: bulge value as float \"\"\" start_point =", "= angle(start_point, end_point) + (math.pi / 2 - math.atan(bulge) *", "bulge) a = angle(start_point, end_point) + (math.pi / 2 -", "as :class:`Vec2` compatible object end_point: end point as :class:`Vec2` compatible", "angle a line defined by two endpoints and x-axis in", "\"\"\" Returns the point at a specified `angle` and `distance`" ]
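# --- Added usage sketch (not part of the module above) ------------------------
# Round trip between arc and bulge representations. bulge == tan(theta/4) for
# included angle theta, so a semicircle (theta == pi) has bulge == 1.0, and
# signed_bulge_radius() recovers r = d*(1 + b*b)/(4*b) from the chord length d.
import math

start, end, bulge = arc_to_bulge(center=(0, 0), start_angle=0,
                                 end_angle=math.pi, radius=1.0)
assert math.isclose(bulge, 1.0)

center, start_angle, end_angle, radius = bulge_to_arc(start, end, bulge)
assert math.isclose(radius, 1.0)
assert abs(center.x) < 1e-12 and abs(center.y) < 1e-12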
# To change this license header, choose License Headers in Project Properties.
# To change this template file, choose Tools | Templates
# and open the template in the editor.

#if __name__ == "__main__":
#    print "Hello World"

from ProgrammingEmail import ManageAttachments
import jpype
import os.path

asposeapispath = os.path.join(os.path.abspath("./../../../"), "lib/")
dataDir = os.path.join(os.path.abspath("./"), "data/")

print "You need to put your Aspose.Email for Java APIs .jars in this folder:\n"+asposeapispath
#print dataDir

jpype.startJVM(jpype.getDefaultJVMPath(), "-Djava.ext.dirs=%s" % asposeapispath)

hw = ManageAttachments(dataDir)
hw.main()
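# --- Added note (assumption, not part of the sample above) --------------------
# The sample targets Python 2 and the legacy java.ext.dirs mechanism. On
# Python 3 with JPype 1.x, the usual startup passes the jars explicitly via
# the classpath keyword instead:
import glob
import jpype

if not jpype.isJVMStarted():
    jpype.startJVM(classpath=glob.glob(asposeapispath + "*.jar"))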
[ "cursor_position = buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) <", "a = 1 ... ... In [2]: a ... Out[2]:", "<= buffer.cursor_position: break p = pos event.current_buffer.cursor_position = p #", "not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position += event.arg @r.add_binding(Keys.Up)", "__version__ as prompt_toolkit_version from .multiline import (auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from", "'] ps2_prompts = [r'\\ *\\.\\.\\.:\\ ?', r'\\.\\.\\.\\ ?', '\\N{CLAPPING HANDS", "= [r'>>>\\ '] + [re.escape(i) + r'\\[\\d+\\]:\\ ' for i,", "buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.lower()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'u') def", "character could be # - or _ for i, c", "state is the original text. The last element is the", "it will not # match (because all \\r's # have", "< buffer.document.line_count - 1: buffer.cursor_down(count=count) elif not buffer.selection_state: buffer.history_forward(count=count) if", "@r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future = run_in_terminal(system_paste) event.current_buffer.cut_selection() paste_text_future.add_done_callback(lambda future:\\", "\"\"\" buffer = event.app.current_buffer # Avoid issues when text grows", "and pks[0].key == \"escape\" and isinstance(pks[1].key, str) and pks[1].key in", "' ') # Strip prompts off pasted text document =", "if (not blank_lines_before and blank_lines_after) or blank_lines_before + blank_lines_after ==", "def delete_char_or_unindent(event): buffer = event.app.current_buffer if buffer.document.current_line_before_cursor.isspace(): spaces = len(buffer.document.current_line_before_cursor)", "0 buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position) def do_cycle_spacing(text, cursor_position, state=[]):", "ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = \"<C-/>\" ALL_KEYS.append(\"<C-/>\") ANSI_SEQUENCES['\\x1b\"5/'] = Keys.ControlSlash", "is # merged. if prompt_toolkit_version[0] != '3': @r.add_binding(Keys.ControlQuestionmark, save_before=lambda e:", "= event.current_buffer if buffer.selection_state: buffer.selection_state = None else: buffer.start_selection() @r.add_binding(Keys.ControlX,", "correctly @r.add_binding(Keys.ControlSpace) def toggle_selection(event): buffer = event.current_buffer if buffer.selection_state: buffer.selection_state", "# position out of sync cursor_position = buffer.cursor_position buffer.cursor_position =", "is stripped from the outputs. Example: >>> split_prompts(''' ... In", "False): buffer.selection_state = None @r.add_binding(Keys.ShiftUp) def select_line_up(event): buffer = event.current_buffer", "positions[0] for pos in positions: if pos >= buffer.cursor_position: break", "history_search=True) @r.add_binding(Keys.Escape, '<') def beginning(event): \"\"\" Move to the beginning", "# prompt. @r.add_binding(Keys.Enter, filter=is_returnable) def accept_after_history_backward(event): pks = event.previous_key_sequence if", "Example: >>> split_prompts(''' ... In [1]: a = 1 ...", "state.append((text, cursor_position)) cursor_position -= len(text_before_cursor) - len(rstripped) -1 text =", "https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is # merged. 
if prompt_toolkit_version[0] != '3': @r.add_binding(Keys.ControlQuestionmark, save_before=lambda", "len(event.current_buffer.text): event.current_buffer.cursor_position += event.arg if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state =", "import CMD_QUEUE data = event.data buffer = event.current_buffer # Be", "end_col = document.translate_index_to_position(to - 1) end_line += 1 else: start_line", "# docstring row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 if", "1: new_lines.append('') if row == 0: buffer.cursor_down() row += 1", "Keys.ControlQuestionmark = \"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = \"<C-/>\"", "?', '\\N{CLAPPING HANDS SIGN}+\\\\ ?⎢\\\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE", "return lines @r.add_binding(Keys.BracketedPaste) def bracketed_paste(event): from .mypython import CMD_QUEUE data", "buffer.cursor_position: break p = pos event.current_buffer.cursor_position = p # This", "iTerm2) seem to paste \\r\\n line endings in a #", "(auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from .tokenize import inside_string, matching_parens from .theme", "occurs for unclosed single # quoted strings (which will give", "the outputs. Example: >>> split_prompts(''' ... In [1]: a =", "global, but that's fine, because # we consider any change", "+ len(data)] buffer.text = otext[:ocpos] + data + otext[ocpos +", "0 if min_indent == 0: break if min_indent == float('inf'):", "if match.group('ps1prompt') is not None: return '\\r' + match.group('line') +", "col, msg, m) in warnings: # Handle SyntaxErrorMessage which is", "lines_after_current[blank_lines_after:] # XXX: Emacs always keeps a newline at the", "@r.add_binding(Keys.ControlSpace) def toggle_selection(event): buffer = event.current_buffer if buffer.selection_state: buffer.selection_state =", "before_cursor = event.app.current_buffer.document.current_line_before_cursor event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4)) LEADING_WHITESPACE = re.compile(r'(", "position as we last left them. If either of those", "before the line ending, # it should act like insert", "break if min_indent == float('inf'): min_indent = 0 uncomment =", "= None @r.add_binding(Keys.Right) def right_multiline(event): \"\"\" Right that wraps around", "tab should insert whitespace, do that instead of completion. \"\"\"", "buffer.history_backward(count=count) if getattr(buffer.selection_state, \"shift_arrow\", False): buffer.selection_state = None @r.add_binding(Keys.Down) def", "3.6 we can do this: # run(copy_command, input=text, encoding='utf-8', check=True)", "(not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\\n'): # If", "they did where we left off. 
# TODO: Use event.previous_key_sequence", "in lines[1:]: # TODO: Send last chunk as bracketed paste,", "elif blank_lines_before + blank_lines_after == 0: return else: buffer.cursor_up(max(blank_lines_before-1, 0))", "file=sys.stderr) def system_paste(): if \"Linux\" in platform.platform(): paste_command = ['xsel',", "def indent(event): \"\"\" When tab should insert whitespace, do that", "# Delete up to the tab stop buffer.delete_before_cursor(count=4 + spaces%-4)", "lines.append(line[:min_indent] + '# ' + line[min_indent:]) else: lines.append(line) new_text =", "state and state[-1] != (text, cursor_position): state.clear() if len(state) ==", "m.start(0) > len(text) - cursor_position: event.current_buffer.cursor_position = len(text) - m.end(1)", "# Some terminals (Like iTerm2) seem to paste \\r\\n line", "False): event.current_buffer.selection_state = None @r.add_binding(Keys.Right) def right_multiline(event): \"\"\" Right that", "run(copy_command, input=text, encoding='utf-8', check=True) subprocess.run(copy_command, input=text.encode('utf-8'), check=True) except FileNotFoundError: print(\"Error:", "= new_text else: buffer.text = new_text buffer.cursor_position += n_changed @r.add_binding(Keys.ControlX,", "buffer.cursor_position buffer.cursor_position = 0 buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position) def", "Left that wraps around in multiline. \"\"\" if event.current_buffer.cursor_position -", "LEADING_WHITESPACE.search(line) if indent: min_indent = min(min_indent, len(indent.group(1))) else: min_indent =", "wraps around in multiline. \"\"\" if event.current_buffer.cursor_position - event.arg >=", "matching: if closing.end == (row, col): new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1])", "SIGN}+\\\\ ?⎢\\\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE", "== float('inf'): min_indent = 0 uncomment = (all(not line.strip() or", "new_lines[row-1] = new_lines[row-1], new_lines[row] buffer.text = '\\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) #", "r\"\"\" Takes text copied from mypython, Python, or IPython session", "line, delete all surrounding blank lines, leaving just one. On", "textwrap import platform def get_key_bindings(): # Based on prompt_toolkit.key_binding.defaults.load_key_bindings() return", "paste. See: https://github.com/ipython/ipython/issues/9737 data = data.replace('\\r\\n', '\\n') data = data.replace('\\r',", "buffer.cursor_position += n_changed @r.add_binding(Keys.ControlX, Keys.ControlE) def open_in_editor(event): event.current_buffer.open_in_editor(event.app) @r.add_binding(Keys.ControlX, Keys.ControlS)", "import KeyBindings, merge_key_bindings from prompt_toolkit.keys import Keys, ALL_KEYS from prompt_toolkit.filters", "- m.end(1) + 1 return event.current_buffer.cursor_position = 0 WORD =", "the text exactly) data = data.replace('\\t', ' ') # Strip", "Keys.Right) def forward_word(event): text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for", "i, line in enumerate(document.lines): if start_line <= i < end_line:", "= buffer.cursor_position pos = None for m in WORD.finditer(text): if", "valid line. Also occurs for unclosed single # quoted strings", "elif len(state) == 2: # Exactly one space at the", "line, delete any immediately following blank lines. 
\"\"\" buffer =", "if CMD_QUEUE: accept_line(event) @r.add_binding(Keys.Escape, ';') def comment(event): buffer = event.current_buffer", "= document.text_after_cursor text_before_cursor = document.text_before_cursor text = buffer.text # isspace", "= 0 @r.add_binding(Keys.Delete, filter=HasSelection()) @r.add_binding(Keys.Backspace, filter=HasSelection()) def delete_selection(event): event.current_buffer.cut_selection() @r.add_binding(Keys.Any,", "'\\n'.join(stripped_before + stripped_after) elif blank_lines_before + blank_lines_after == 0: return", "single # quoted strings (which will give a syntax error)", "terminals (Like iTerm2) seem to paste \\r\\n line endings in", "None @r.add_binding(Keys.Escape, ' ') def cycle_spacing(event): \"\"\" Based on emacs's", "of the buffer, accept unless we are in a #", "from prompt_toolkit.filters import Condition, HasSelection, is_searching from prompt_toolkit.selection import SelectionState", "set to C-S-/ (C-?) in iTerm2 settings Keys.ControlQuestionmark = \"<C-?>\"", "event.arg @r.add_binding(Keys.ShiftRight) def select_right(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if", "from .mypython import validate_text text = textwrap.dedent(text).strip() + '\\n' text", "else: pos = buffer.cursor_position if pos: deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted)", "+ 1 return event.current_buffer.cursor_position = 0 WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape,", "# lines correctly. # Gives the positions right before one", "document.selection_range() start_line, start_col = document.translate_index_to_position(from_) end_line, end_col = document.translate_index_to_position(to -", "prompt_toolkit.key_binding.bindings.named_commands import (accept_line, self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import if_no_repeat", "try: # In Python 3.6 we can do this: #", "with four spaces (C-x C-y will still paste the text", "stripped_after) elif blank_lines_before + blank_lines_after == 0: return else: buffer.cursor_up(max(blank_lines_before-1,", "buffer = event.current_buffer buffer._show_syntax_warning = True if not positions or", "= new_pos return event.app.output.bell() @r.add_binding(Keys.Escape, Keys.ControlB) def backward_sexp(event): buffer =", "will not # match (because all \\r's # have been", "indent: event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1) @r.add_binding(Keys.Backspace, save_before=if_no_repeat) def delete_char_or_unindent(event):", "below, keeping the cursor # position out of sync cursor_position", "text \"\"\" text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m", "of bounds from trailing # whitespace buffer.cursor_position = min(buffer.cursor_position, len(new_text))", "= event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): if", "# quoted strings (which will give a syntax error) accept_line(event)", "inside_string(event.current_buffer.text, row, col): indent = LEADING_WHITESPACE.match(document.current_line_before_cursor) current_line_indent = indent.group(1) if", "m in WORD.finditer(text): pos = m.end(0) if pos > cursor_position:", "@r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape, Keys.Left) def backward_word(event): \"\"\" Move back one", "', '').endswith('\\n'): # If we are at the end of", "m.start(0) < cursor_position: 
event.current_buffer.cursor_position = m.start(0) return event.current_buffer.cursor_position = 0", "= 0 for line in lines_after_current: if not line.strip(): blank_lines_after", "= document.translate_index_to_position(from_) end_line, end_col = document.translate_index_to_position(to - 1) end_line +=", "+= 1 if multiline and inside_string(text, row, col): # We", "is the same as the default binding # Some terminals", "Keys, ALL_KEYS from prompt_toolkit.filters import Condition, HasSelection, is_searching from prompt_toolkit.selection", "last element is the # buffer text and cursor position", "the CMD_QUEUE (it # breaks things). lines = ['\\n'.join(lines)] break", "buffer.start_selection() buffer.selection_state.shift_arrow = True up_position = buffer.document.get_cursor_up_position() buffer.cursor_position += up_position", "itself. Just before the line ending, # it should act", "around in multiline. \"\"\" if event.current_buffer.cursor_position - event.arg >= 0:", "single space. On second call, remove all whitespace. On third", "from mypython, Python, or IPython session and returns a list", "= buffer.document row, col = document.translate_index_to_position(buffer.cursor_position) row += 1 if", "CMD_QUEUE.append(text) if CMD_QUEUE: accept_line(event) @r.add_binding(Keys.Escape, ';') def comment(event): buffer =", "stripped_before = lines_up_to_current else: stripped_before = lines_up_to_current[:-blank_lines_before] stripped_after = lines_after_current[blank_lines_after:]", "3.6 we can do this: # run(paste_command, input=text, encoding='utf-8') p", "text contains no carriage returns (\\r). Trailing whitespace and newlines", "in document.lines[start_line:end_line]) and ''.join(document.lines[start_line:end_line]).strip()) lines = [] for i, line", "one. On nonblank line, delete any immediately following blank lines.", "cursor and replace it with a single space. 
@r.add_binding(Keys.Escape, Keys.Up)
def previous_history_search(event):
    event.key_sequence[-1].accept_next = True
    buffer = event.current_buffer
    buffer.history_backward(count=event.arg, history_search=True)

@r.add_binding(Keys.Escape, 'P')
@r.add_binding(Keys.Escape, Keys.Down)
def forward_history_search(event):
    event.key_sequence[-1].accept_next = True
    buffer = event.current_buffer
    buffer.history_forward(count=event.arg, history_search=True)

@r.add_binding(Keys.Escape, '<')
def beginning(event):
    """
    Move to the beginning
    """
    event.current_buffer.cursor_position = 0

@r.add_binding(Keys.Escape, '>')
def end(event):
    """
    Move to the end
    """
    event.current_buffer.cursor_position = len(event.current_buffer.text)

# Document.start_of_paragraph/end_of_paragraph don't treat multiple blank
# lines correctly.

# Gives the positions right before one or more blank lines
BLANK_LINES = re.compile(r'\S *(\n *\n)')

@r.add_binding(Keys.Escape, '}')
def forward_paragraph(event):
    """
    Move forward one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in BLANK_LINES.finditer(text):
        if m.start(0) > cursor_position:
            event.current_buffer.cursor_position = m.start(1)+1
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, '{')
def backward_paragraph(event):
    """
    Move back one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in BLANK_LINES.finditer(text[::-1]):
        if m.start(0) > len(text) - cursor_position:
            event.current_buffer.cursor_position = len(text) - m.end(1) + 1
            return
    event.current_buffer.cursor_position = 0
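# A minimal sketch of what BLANK_LINES finds: the start of each run of blank
# lines, captured right after a nonblank character. The helper name
# _demo_blank_lines is hypothetical, not part of this module.
def _demo_blank_lines():
    text = 'a\n\nb\n\n\nc'
    # group(1) starts at the first newline of each blank run: [1, 4]
    return [m.start(1) for m in BLANK_LINES.finditer(text)]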
WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)')

@r.add_binding(Keys.Escape, 'f')
@r.add_binding(Keys.Escape, Keys.Right)
def forward_word(event):
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            event.current_buffer.cursor_position = m.end(0)
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'b')
@r.add_binding(Keys.Escape, Keys.Left)
def backward_word(event):
    """
    Move back one word
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            event.current_buffer.cursor_position = m.start(0)
            return
    event.current_buffer.cursor_position = 0

@r.add_binding(Keys.Escape, 'd')
def kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    pos = None
    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            pos = m.end(0) - cursor_position
            break
    if pos:
        deleted = buffer.delete(count=pos)
        event.app.clipboard.set_text(deleted)

@r.add_binding(Keys.Escape, Keys.Backspace)
def backward_kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            pos = cursor_position - m.start(0)
            break
    else:
        pos = buffer.cursor_position

    if pos:
        deleted = buffer.delete_before_cursor(count=pos)
        event.app.clipboard.set_text(deleted)
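# A minimal sketch of the subword boundaries WORD produces, so the word
# motions above stop inside camelCase identifiers. The helper name
# _demo_word_parts is hypothetical, not part of this module.
def _demo_word_parts():
    # Lowercase runs, ALL-CAPS runs, and Capitalized runs are separate words:
    # ['foo', 'Bar', 'BAZ', 'quux2']
    return [m.group(0) for m in WORD.finditer('fooBarBAZ quux2')]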
def insert_text_ovewrite(buffer, data, move_cursor=True):
    """
    Insert characters at cursor position.

    :param fire_event: Fire `on_text_insert` event. This is mainly used to
        trigger autocompletion while typing.
    """
    # Original text & cursor position.
    otext = buffer.text
    ocpos = buffer.cursor_position

    # Don't overwrite the newline itself. Just before the line ending,
    # it should act like insert mode.
    overwritten_text = otext[ocpos:ocpos + len(data)]

    buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):]

    if move_cursor:
        buffer.cursor_position += len(data)

@r.add_binding(Keys.Escape, 'l')
def downcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.lower())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'u')
def upcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.upper())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'c')
def capitalize_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            # Don't use word.capitalize() because the first character could be
            # - or _
            for i, c in enumerate(word):
                if c.isalnum():
                    word = word[:i] + c.capitalize() + word[i+1:].lower()
                    break
            insert_text_ovewrite(buffer, word)
            return
    event.current_buffer.cursor_position = len(text)
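# A minimal sketch of insert_text_ovewrite on a bare prompt_toolkit Buffer
# (Buffer and its text/cursor_position setters are standard prompt_toolkit
# API). The helper name _demo_overwrite is hypothetical, not part of this
# module.
def _demo_overwrite():
    from prompt_toolkit.buffer import Buffer
    b = Buffer()
    b.text = 'abcdef'
    b.cursor_position = 2
    insert_text_ovewrite(b, 'XY')   # overwrites 'cd' in place
    return b.text                   # 'abXYef', cursor now at position 4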
@r.add_binding(Keys.Escape, Keys.ControlF)
def forward_sexp(event):
    buffer = event.current_buffer
    document = buffer.document
    text = buffer.text
    row, col = document.translate_index_to_position(buffer.cursor_position)
    row += 1
    matching, mismatching = matching_parens(text)
    for opening, closing in matching:
        if opening.start == (row, col):
            new_pos = document.translate_row_col_to_index(closing.end[0]-1, closing.end[1])
            buffer.cursor_position = new_pos
            return
    event.app.output.bell()

@r.add_binding(Keys.Escape, Keys.ControlB)
def backward_sexp(event):
    buffer = event.current_buffer
    document = buffer.document
    text = buffer.text
    row, col = document.translate_index_to_position(buffer.cursor_position)
    row += 1
    matching, mismatching = matching_parens(text)
    for opening, closing in matching:
        if closing.end == (row, col):
            new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1])
            buffer.cursor_position = new_pos
            return
    event.app.output.bell()

@r.add_binding(Keys.Left)
def left_multiline(event):
    """
    Left that wraps around in multiline.
    """
    if event.current_buffer.cursor_position - event.arg >= 0:
        event.current_buffer.cursor_position -= event.arg

    if getattr(event.current_buffer.selection_state, "shift_arrow", False):
        event.current_buffer.selection_state = None

@r.add_binding(Keys.Right)
def right_multiline(event):
    """
    Right that wraps around in multiline.
    """
    if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text):
        event.current_buffer.cursor_position += event.arg

    if getattr(event.current_buffer.selection_state, "shift_arrow", False):
        event.current_buffer.selection_state = None

@r.add_binding(Keys.ControlD)
def exit(event):
    event.app.exit(exception=EOFError, style='class:exiting')

@r.add_binding(Keys.ControlC, filter=~is_searching)
def keyboard_interrupt(event):
    event.app.exit(exception=KeyboardInterrupt, style='class:aborting')
is_returnable = Condition(
    lambda: get_app().current_buffer.is_returnable)

@r.add_binding(Keys.Enter, filter=is_returnable)
def multiline_enter(event):
    """
    When not in multiline, execute. When in multiline, try to intelligently
    add a newline or execute.
    """
    buffer = event.current_buffer
    document = buffer.document
    multiline = document_is_multiline_python(document)

    text_after_cursor = document.text_after_cursor
    text_before_cursor = document.text_before_cursor
    text = buffer.text
    # isspace doesn't respect vacuous truth
    if (not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\n'):
        # If we are at the end of the buffer, accept unless we are in a
        # docstring
        row, col = document.translate_index_to_position(buffer.cursor_position)
        row += 1
        if multiline and inside_string(text, row, col):
            # We are in a docstring
            auto_newline(event.current_buffer)
        else:
            accept_line(event)
    elif not multiline:
        # Always accept a single valid line. Also occurs for unclosed single
        # quoted strings (which will give a syntax error)
        accept_line(event)
    else:
        auto_newline(event.current_buffer)

# Always accept the line if the previous key was Up
# Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492.
# We don't need a parallel for down because down is already at the end of the
# prompt.
@r.add_binding(Keys.Enter, filter=is_returnable)
def accept_after_history_backward(event):
    pks = event.previous_key_sequence
    if pks and getattr(pks[-1], 'accept_next', False) and ((len(pks) == 1 and
        pks[0].key == "up") or (len(pks) == 2 and pks[0].key == "escape" and
        isinstance(pks[1].key, str) and pks[1].key in ['p', 'P', 'up', 'down'])):
        accept_line(event)
    else:
        multiline_enter(event)

@r.add_binding(Keys.ControlJ)
def insert_newline(event):
    auto_newline(event.current_buffer)

@r.add_binding(Keys.ControlO)
def open_line(event):
    event.current_buffer.newline(copy_margin=False)
    event.current_buffer.cursor_left()

# M-[ a g is set to S-Enter in iTerm2 settings
Keys.ShiftEnter = "<Shift-Enter>"
ALL_KEYS.append('<Shift-Enter>')
ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter
ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter

if prompt_toolkit_version[0] != '3':
    r.add_binding(Keys.ShiftEnter)(accept_line)
\"\"\" # Text before", "col) positions.append(pos) return positions @r.add_binding(Keys.Escape, 'p') def previous_warning(event): positions =", "buffer = event.current_buffer # Be sure to use \\n as", "prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser import ANSI_SEQUENCES from prompt_toolkit.application.current import", "whitespace because of the # TabShouldInsertWhitespaceFilter. before_cursor = event.app.current_buffer.document.current_line_before_cursor event.app.current_buffer.insert_text('", "(accept_line, self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic", "is the same warning for the whole # line. if", "def auto_up(event): buffer = event.current_buffer count = event.arg if buffer.document.cursor_position_row", "event.key_sequence[-1].accept_next = True buffer = event.current_buffer buffer.history_forward(count=event.arg, history_search=True) @r.add_binding(Keys.Escape, '<')", "textwrap.dedent(text).strip() + '\\n' text = textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip() lines = text.split('\\r')", "comment delimiters min_indent = float('inf') for line in document.lines[start_line:end_line]: if", "event. This is mainly used to trigger autocompletion while typing.", "\"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = \"<C-/>\" ALL_KEYS.append(\"<C-/>\") ANSI_SEQUENCES['\\x1b\"5/']", "input=text.encode('utf-8'), check=True) except FileNotFoundError: print(\"Error: could not find\", copy_command[0], file=sys.stderr)", ":param fire_event: Fire `on_text_insert` event. This is mainly used to", "\"\"\" Move to the end \"\"\" event.current_buffer.cursor_position = len(event.current_buffer.text) #", "back to the beginning of the line, ignoring whitespace. \"\"\"", "filter=HasSelection()) def self_insert_and_clear_selection(event): event.current_buffer.cut_selection() self_insert(event) @r.add_binding(Keys.ControlK, filter=HasSelection()) @r.add_binding(Keys.ControlU, filter=HasSelection()) def", "iTerm2 settings Keys.ShiftEnter = \"<Shift-Enter>\" ALL_KEYS.append('<Shift-Enter>') ANSI_SEQUENCES['\\x1b[ag'] = Keys.ShiftEnter ANSI_SEQUENCES['\\x1bOM']", "did where we left off. # TODO: Use event.previous_key_sequence instead.", "def insert_newline(event): auto_newline(event.current_buffer) @r.add_binding(Keys.ControlO) def open_line(event): event.current_buffer.newline(copy_margin=False) event.current_buffer.cursor_left() # M-[", "@r.add_binding(Keys.Escape, '<') def beginning(event): \"\"\" Move to the beginning \"\"\"", "to escape all spaces here because of verbose (x) option", "in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position: pos = cursor_position -", "'-selection', 'c'] else: copy_command = ['pbcopy'] try: # In Python", "p = positions[-1] for pos in reversed(positions): if pos <=", "= event.current_buffer.cursor_position for m in WORD.finditer(text): pos = m.end(0) if", "of inputs Outputs are stripped. 
If no prompts are found", "# In Python 3.6 we can do this: # run(copy_command,", "from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings from prompt_toolkit.keys import Keys, ALL_KEYS", "= buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position:", "buffer = event.app.current_buffer # Avoid issues when text grows or", "all whitespace (if any) from around the cursor and replace", "left_multiline(event): \"\"\" Left that wraps around in multiline. \"\"\" if", "= document.text_before_cursor text = buffer.text # isspace doesn't respect vacuous", "following blank lines. \"\"\" buffer = event.app.current_buffer document = buffer.document", "def split_prompts(text, indent=''): r\"\"\" Takes text copied from mypython, Python,", "min(min_indent, len(indent.group(1))) else: min_indent = 0 if min_indent == 0:", "buffer.document text = buffer.text row, col = document.translate_index_to_position(buffer.cursor_position) row +=", "if the previous key was Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. #", "the previous key was Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We", "subprocess.CalledProcessError as e: print(copy_command[0], \"error:\", e, file=sys.stderr) def system_paste(): if", "buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position += event.arg @r.add_binding(Keys.Up) def", "buffer text and cursor position as we last left them.", "textwrap.indent(lines[i], indent) # Extraneous newlines at the end will be", "file=sys.stderr) except subprocess.CalledProcessError as e: print(paste_command[0], \"error:\", e, file=sys.stderr) return", "backward_kill_word(event): buffer = event.current_buffer text = buffer.text cursor_position = buffer.cursor_position", "= '\\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) # Selection stuff @r.add_binding(Keys.ShiftLeft) def select_left(event):", "(C-?) in iTerm2 settings Keys.ControlQuestionmark = \"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] =", "position out of sync cursor_position = buffer.cursor_position buffer.cursor_position = 0", "auto_newline(event.current_buffer) # Always accept the line if the previous key", "will still paste the text exactly) data = data.replace('\\t', '", "= text[cursor_position:].lstrip() text_before_cursor = text[:cursor_position] # The first element of", "buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): pos =", "alone. The resulting text is indented by indent, except for", "c in enumerate(word): if c.isalnum(): word = word[:i] + c.capitalize()", "If the prompt is not # matched, this is a", "end of the # prompt. 
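# A minimal sketch of the tab-stop arithmetic used in delete_char_or_unindent
# above: 4 + spaces % -4 is how many characters to delete to land on the
# previous multiple of four. The helper name _demo_tab_stop is hypothetical,
# not part of this module.
def _demo_tab_stop(spaces):
    # spaces=1 -> 1, spaces=4 -> 4, spaces=6 -> 2 (Python's % with a negative
    # modulus yields a result in (-4, 0])
    return 4 + spaces % -4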
@r.add_binding(Keys.Escape, ' ')
def cycle_spacing(event):
    """
    Based on emacs's cycle-spacing

    On first call, remove all whitespace (if any) from around the cursor and
    replace it with a single space.

    On second call, remove all whitespace.

    On third call, restore the original whitespace and cursor position.
    """
    buffer = event.app.current_buffer
    # Avoid issues when text grows or shrinks below, keeping the cursor
    # position out of sync
    cursor_position = buffer.cursor_position
    buffer.cursor_position = 0
    buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position)

def do_cycle_spacing(text, cursor_position, state=[]):
    rstripped = text[:cursor_position].rstrip()
    lstripped = text[cursor_position:].lstrip()
    text_before_cursor = text[:cursor_position]

    # The first element of state is the original text. The last element is the
    # buffer text and cursor position as we last left them. If either of those
    # have changed, reset. The state here is global, but we consider any
    # change to be enough to clear the state. The worst that happens here is
    # that we resume when we shouldn't if things look exactly as they did
    # where we left off.
    # TODO: Use event.previous_key_sequence instead.
    if state and state[-1] != (text, cursor_position):
        state.clear()

    if len(state) == 0:
        # Replace all whitespace at the cursor (if any) with a single space.
        state.append((text, cursor_position))
        cursor_position -= len(text_before_cursor) - len(rstripped) - 1
        text = rstripped + ' ' + lstripped
        state.append((text, cursor_position))
    elif len(state) == 2:
        # Exactly one space at the cursor. Remove it.
        cursor_position -= 1
        text = rstripped + lstripped
        state.append((text, cursor_position))
    elif len(state) == 3:
        # Restore the original whitespace and cursor position
        text, cursor_position = state[0]
        state.clear()

    if cursor_position < 0:
        cursor_position = 0
    if cursor_position > len(text):
        cursor_position = len(text)

    return text, cursor_position
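# A minimal sketch of one full do_cycle_spacing cycle on a fresh state. The
# helper name _demo_cycle_spacing is hypothetical, not part of this module.
def _demo_cycle_spacing():
    text, pos = do_cycle_spacing('a    b', 3)   # ('a b', 2): collapsed to one space
    text, pos = do_cycle_spacing(text, pos)     # ('ab', 1): the single space removed
    text, pos = do_cycle_spacing(text, pos)     # ('a    b', 3): original restored
    return text, pos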
@r.add_binding(Keys.ControlX, Keys.ControlO)
def delete_blank_lines(event):
    """
    On blank line, delete all surrounding blank lines, leaving just one.

    On isolated blank line, delete that one.

    On nonblank line, delete any immediately following blank lines.
    """
    buffer = event.app.current_buffer
    document = buffer.document
    lines_up_to_current = document.lines[:document.cursor_position_row+1]
    lines_after_current = document.lines[document.cursor_position_row+1:]

    blank_lines_before = 0
    for line in lines_up_to_current[::-1]:
        if not line.strip():
            blank_lines_before += 1
        else:
            break

    blank_lines_after = 0
    for line in lines_after_current:
        if not line.strip():
            blank_lines_after += 1
        else:
            break

    if not blank_lines_before:
        stripped_before = lines_up_to_current
    else:
        stripped_before = lines_up_to_current[:-blank_lines_before]
    stripped_after = lines_after_current[blank_lines_after:]

    # XXX: Emacs always keeps a newline at the end of the file, but I don't
    # think it matters here.
    if (not blank_lines_before and blank_lines_after) or blank_lines_before + blank_lines_after == 1:
        new_text = '\n'.join(stripped_before + stripped_after)
    elif blank_lines_before + blank_lines_after == 0:
        return
    else:
        buffer.cursor_up(max(blank_lines_before-1, 0))
        new_text = '\n'.join(stripped_before + [''] + stripped_after)

    # Even though we do auto_up, it can be out of bounds from trailing
    # whitespace
    buffer.cursor_position = min(buffer.cursor_position, len(new_text))
    buffer.text = new_text

@r.add_binding(Keys.ControlX, Keys.ControlT)
def transpose_lines(event):
    buffer = event.current_buffer
    document = buffer.document
    row = document.cursor_position_row

    new_lines = document.lines[:]
    if len(new_lines) == 1:
        new_lines.append('')

    if row == 0:
        buffer.cursor_down()
        row += 1

    if row == len(new_lines) - 1:
        new_lines.append('')

    new_lines[row], new_lines[row-1] = new_lines[row-1], new_lines[row]
    buffer.text = '\n'.join(new_lines)
    buffer.cursor_down()
    beginning_of_line(event)
# Selection stuff
@r.add_binding(Keys.ShiftLeft)
def select_left(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position -= event.arg

@r.add_binding(Keys.ShiftRight)
def select_right(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position += event.arg

@r.add_binding(Keys.Up)
def auto_up(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row > 0:
        buffer.cursor_up(count=count)
    elif not buffer.selection_state:
        event.key_sequence[-1].accept_next = True
        buffer.history_backward(count=count)

    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None

@r.add_binding(Keys.Down)
def auto_down(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row < buffer.document.line_count - 1:
        buffer.cursor_down(count=count)
    elif not buffer.selection_state:
        buffer.history_forward(count=count)

    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None

@r.add_binding(Keys.ShiftUp)
def select_line_up(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        up_position = buffer.document.get_cursor_up_position()
        buffer.cursor_position += up_position
        if not up_position:
            buffer.cursor_position = 0

@r.add_binding(Keys.ShiftDown)
def select_line_down(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        down_position = buffer.document.get_cursor_down_position()
        buffer.cursor_position += down_position
        if not down_position:
            buffer.cursor_position = len(buffer.document.text)

# The default doesn't toggle correctly
@r.add_binding(Keys.ControlSpace)
def toggle_selection(event):
    buffer = event.current_buffer
    if buffer.selection_state:
        buffer.selection_state = None
    else:
        buffer.start_selection()

@r.add_binding(Keys.ControlX, 'h')
def select_all(event):
    buffer = event.current_buffer
    buffer.selection_state = SelectionState(len(buffer.document.text))
    buffer.cursor_position = 0

@r.add_binding(Keys.Delete, filter=HasSelection())
@r.add_binding(Keys.Backspace, filter=HasSelection())
def delete_selection(event):
    event.current_buffer.cut_selection()

@r.add_binding(Keys.Any, filter=HasSelection())
def self_insert_and_clear_selection(event):
    event.current_buffer.cut_selection()
    self_insert(event)

@r.add_binding(Keys.ControlK, filter=HasSelection())
@r.add_binding(Keys.ControlU, filter=HasSelection())
def kill_selection(event):
    data = event.current_buffer.cut_selection()
    event.app.clipboard.set_data(data)
def system_copy(text):
    if "Linux" in platform.platform():
        copy_command = ['xclip', '-selection', 'c']
    else:
        copy_command = ['pbcopy']

    try:
        # In Python 3.6 we can do this:
        # run(copy_command, input=text, encoding='utf-8', check=True)
        subprocess.run(copy_command, input=text.encode('utf-8'), check=True)
    except FileNotFoundError:
        print("Error: could not find", copy_command[0], file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print(copy_command[0], "error:", e, file=sys.stderr)

def system_paste():
    if "Linux" in platform.platform():
        paste_command = ['xsel', '-b']
    else:
        paste_command = ['pbpaste']

    try:
        # In Python 3.6 we can do this:
        # p = run(paste_command, stdout=subprocess.PIPE, encoding='utf-8')
        p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True)
    except FileNotFoundError:
        print("Error: could not find", paste_command[0], file=sys.stderr)
        return ''
    except subprocess.CalledProcessError as e:
        print(paste_command[0], "error:", e, file=sys.stderr)
        return ''
    return p.stdout.decode('utf-8')

@r.add_binding(Keys.ControlX, Keys.ControlW)
def copy_to_clipboard(event):
    if event.current_buffer.document.selection:
        from_, to = event.current_buffer.document.selection_range()
        run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1]))

@r.add_binding(Keys.ControlX, Keys.ControlY)
def paste_from_clipboard(event):
    paste_text_future = run_in_terminal(system_paste)

    event.current_buffer.cut_selection()
    paste_text_future.add_done_callback(lambda future:\
        event.current_buffer.paste_clipboard_data(ClipboardData(future.result())))
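# A minimal sketch of the system clipboard round trip above; it assumes the
# relevant external tools (pbcopy/pbpaste or xclip/xsel) are installed. The
# helper name _demo_clipboard_roundtrip is hypothetical, not part of this
# module.
def _demo_clipboard_roundtrip():
    system_copy('hello from mypython')
    return system_paste()  # 'hello from mypython' when the tools are present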
\"\"\" if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text): event.current_buffer.cursor_position", "'#' for line in document.lines[start_line:end_line]) and ''.join(document.lines[start_line:end_line]).strip()) lines = []", "\"\"\" if event.current_buffer.cursor_position - event.arg >= 0: event.current_buffer.cursor_position -= event.arg", "PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE = re.compile(r'''(?x) #", "(Like iTerm2) seem to paste \\r\\n line endings in a", "else: auto_newline(event.current_buffer) # Always accept the line if the previous", "back one paragraph of text \"\"\" text = event.current_buffer.text cursor_position", "buffer.document.text_after_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position +=", "len(new_lines) == 1: new_lines.append('') if row == 0: buffer.cursor_down() row", "inputs Outputs are stripped. If no prompts are found the", "match.group('ps1prompt') is not None: return '\\r' + match.group('line') + '\\n'", "line, delete that one. On nonblank line, delete any immediately", "= pos event.current_buffer._show_syntax_warning = True event.current_buffer.cursor_position = p @r.add_binding(Keys.Escape, 'n')", "len(data)] buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):]", "import Keys, ALL_KEYS from prompt_toolkit.filters import Condition, HasSelection, is_searching from", "p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True) except FileNotFoundError: print(\"Error: could not", "event.app.clipboard.set_text(deleted) @r.add_binding(Keys.Escape, Keys.Backspace) def backward_kill_word(event): buffer = event.current_buffer text =", "line must be whitespace because of the # TabShouldInsertWhitespaceFilter. before_cursor", "?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE = re.compile(r'''(?x)", "the whole # line. if m.col != col: continue pos", "new_pos return event.app.output.bell() @r.add_binding(Keys.Escape, Keys.ControlB) def backward_sexp(event): buffer = event.current_buffer", "else: buffer.cursor_up(max(blank_lines_before-1, 0)) new_text = '\\n'.join(stripped_before + [''] + stripped_after)", "def exit(event): event.app.exit(exception=EOFError, style='class:exiting') @r.add_binding(Keys.ControlC, filter=~is_searching) def keyboard_interrupt(event): event.app.exit(exception=KeyboardInterrupt, style='class:aborting')", "copy_command = ['pbcopy'] try: # In Python 3.6 we can", "auto_newline(event.current_buffer) else: accept_line(event) elif not multiline: # Always accept a", "multiline. \"\"\" if event.current_buffer.cursor_position - event.arg >= 0: event.current_buffer.cursor_position -=", "= document.translate_index_to_position(to - 1) end_line += 1 else: start_line =", "When tab should insert whitespace, do that instead of completion.", "= event.current_buffer.document.selection_range() run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1])) @r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future", "' ') def cycle_spacing(event): \"\"\" Based on emacs's cycle-spacing On", "lines[1:]: # TODO: Send last chunk as bracketed paste, so", "cursor position as we last left them. 
# Document.start_of_paragraph/end_of_paragraph don't treat multiple blank
# lines correctly.

# Gives the positions right before one or more blank lines
BLANK_LINES = re.compile(r'\S *(\n *\n)')
@r.add_binding(Keys.Escape, '}')
def forward_paragraph(event):
    """
    Move forward one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in BLANK_LINES.finditer(text):
        if m.start(0) > cursor_position:
            event.current_buffer.cursor_position = m.start(1)+1
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, '{')
def backward_paragraph(event):
    """
    Move back one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in BLANK_LINES.finditer(text[::-1]):
        if m.start(0) > len(text) - cursor_position:
            event.current_buffer.cursor_position = len(text) - m.end(1) + 1
            return
    event.current_buffer.cursor_position = 0
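# A small sketch (not from the original source) of where BLANK_LINES puts
# paragraph boundaries; m.start(1)+1 is the start of the blank line that
# separates two paragraphs:
#
#     text = 'one\n\ntwo\n\nthree'
#     [m.start(1) + 1 for m in BLANK_LINES.finditer(text)]  # -> [4, 9]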
WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)')
@r.add_binding(Keys.Escape, 'f')
@r.add_binding(Keys.Escape, Keys.Right)
def forward_word(event):
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            event.current_buffer.cursor_position = m.end(0)
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'b')
@r.add_binding(Keys.Escape, Keys.Left)
def backward_word(event):
    """
    Move back one word
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            event.current_buffer.cursor_position = m.start(0)
            return
    event.current_buffer.cursor_position = 0

@r.add_binding(Keys.Escape, 'd')
def kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    pos = None
    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            pos = m.end(0) - cursor_position
            break
    if pos:
        deleted = buffer.delete(count=pos)
        event.app.clipboard.set_text(deleted)

@r.add_binding(Keys.Escape, Keys.Backspace)
def backward_kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            pos = cursor_position - m.start(0)
            break
    else:
        pos = buffer.cursor_position

    if pos:
        deleted = buffer.delete_before_cursor(count=pos)
        event.app.clipboard.set_text(deleted)
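# A quick sketch (not from the original source) of how the WORD regex
# tokenizes identifiers, which is what the M-f/M-b/M-d motions step over:
#
#     [m.group(0) for m in WORD.finditer('foo_barBaz')]
#     # -> ['foo', 'bar', 'Baz']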
def insert_text_ovewrite(buffer, data, move_cursor=True):
    """
    Insert characters at cursor position.

    :param fire_event: Fire `on_text_insert` event. This is mainly used to
        trigger autocompletion while typing.
    """
    # Original text & cursor position.
    otext = buffer.text
    ocpos = buffer.cursor_position

    # Don't overwrite the newline itself. Just before the line ending,
    # it should act like insert mode.
    overwritten_text = otext[ocpos:ocpos + len(data)]
    buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):]

    if move_cursor:
        buffer.cursor_position += len(data)

@r.add_binding(Keys.Escape, 'l')
def downcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.lower())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'u')
def upcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.upper())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'c')
def capitalize_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            # Don't use word.capitalize() because the first character could be
            # - or _
            for i, c in enumerate(word):
                if c.isalnum():
                    word = word[:i] + c.capitalize() + word[i+1:].lower()
                    break
            insert_text_ovewrite(buffer, word)
            return
    event.current_buffer.cursor_position = len(text)
@r.add_binding(Keys.Escape, Keys.ControlF)
def forward_sexp(event):
    buffer = event.current_buffer
    document = buffer.document
    text = buffer.text
    row, col = document.translate_index_to_position(buffer.cursor_position)
    row += 1
    matching, mismatching = matching_parens(text)

    for opening, closing in matching:
        if opening.start == (row, col):
            new_pos = document.translate_row_col_to_index(closing.end[0]-1, closing.end[1])
            buffer.cursor_position = new_pos
            return
    event.app.output.bell()

@r.add_binding(Keys.Escape, Keys.ControlB)
def backward_sexp(event):
    buffer = event.current_buffer
    document = buffer.document
    text = buffer.text
    row, col = document.translate_index_to_position(buffer.cursor_position)
    row += 1
    matching, mismatching = matching_parens(text)

    for opening, closing in matching:
        if closing.end == (row, col):
            new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1])
            buffer.cursor_position = new_pos
            return
    event.app.output.bell()

@r.add_binding(Keys.Left)
def left_multiline(event):
    """
    Left that wraps around in multiline.
    """
    if event.current_buffer.cursor_position - event.arg >= 0:
        event.current_buffer.cursor_position -= event.arg

    if getattr(event.current_buffer.selection_state, "shift_arrow", False):
        event.current_buffer.selection_state = None

@r.add_binding(Keys.Right)
def right_multiline(event):
    """
    Right that wraps around in multiline.
    """
    if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text):
        event.current_buffer.cursor_position += event.arg

    if getattr(event.current_buffer.selection_state, "shift_arrow", False):
        event.current_buffer.selection_state = None
@r.add_binding(Keys.ControlD)
def exit(event):
    event.app.exit(exception=EOFError, style='class:exiting')

@r.add_binding(Keys.ControlC, filter=~is_searching)
def keyboard_interrupt(event):
    event.app.exit(exception=KeyboardInterrupt, style='class:aborting')

is_returnable = Condition(
    lambda: get_app().current_buffer.is_returnable)

@r.add_binding(Keys.Enter, filter=is_returnable)
def multiline_enter(event):
    buffer = event.current_buffer
    document = buffer.document
    multiline = document_is_multiline_python(document)

    text_after_cursor = document.text_after_cursor
    text_before_cursor = document.text_before_cursor
    text = buffer.text
    # isspace doesn't respect vacuous truth
    if (not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\n'):
        # If we are at the end of the buffer, accept unless we are in a
        # docstring
        row, col = document.translate_index_to_position(buffer.cursor_position)
        row += 1
        if multiline and inside_string(text, row, col):
            # We are inside a docstring
            auto_newline(event.current_buffer)
        else:
            accept_line(event)
    elif not multiline:
        # Always accept a single valid line. Also occurs for unclosed single
        # quoted strings (which will give a syntax error)
        accept_line(event)
    else:
        auto_newline(event.current_buffer)
# Always accept the line if the previous key was Up
# Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492.
# We don't need a parallel for down because down is already at the end of the
# prompt.
@r.add_binding(Keys.Enter, filter=is_returnable)
def accept_after_history_backward(event):
    pks = event.previous_key_sequence
    if pks and getattr(pks[-1], 'accept_next', False) and ((len(pks) == 1
        and pks[0].key == "up") or (len(pks) == 2 and pks[0].key == "escape"
        and isinstance(pks[1].key, str) and pks[1].key in ['p', 'P', 'up',
        'down'])):
        accept_line(event)
    else:
        multiline_enter(event)

@r.add_binding(Keys.Escape, Keys.Enter)
@r.add_binding(Keys.Escape, Keys.ControlJ)
def insert_newline(event):
    auto_newline(event.current_buffer)

@r.add_binding(Keys.ControlO)
def open_line(event):
    event.current_buffer.newline(copy_margin=False)
    event.current_buffer.cursor_left()
# M-[ a g is set to S-Enter in iTerm2 settings
Keys.ShiftEnter = "<Shift-Enter>"
ALL_KEYS.append('<Shift-Enter>')
ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter
ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter

if prompt_toolkit_version[0] != '3':
    r.add_binding(Keys.ShiftEnter)(accept_line)

@r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace)
def indent(event):
    """
    When tab should insert whitespace, do that instead of completion.
    """
    # Text before cursor on the line must be whitespace because of the
    # TabShouldInsertWhitespaceFilter.
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4))

LEADING_WHITESPACE = re.compile(r'( *)[^ ]?')
@r.add_binding(Keys.Escape, 'm')
def back_to_indentation(event):
    """
    Move back to the beginning of the line, ignoring whitespace
    """
    current_line = event.app.current_buffer.document.current_line
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    indent = LEADING_WHITESPACE.search(current_line)
    if indent:
        event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1)

@r.add_binding(Keys.Backspace, save_before=if_no_repeat)
def delete_char_or_unindent(event):
    buffer = event.app.current_buffer
    if buffer.document.current_line_before_cursor.isspace():
        spaces = len(buffer.document.current_line_before_cursor)
        # Delete up to the tab stop
        buffer.delete_before_cursor(count=4 + spaces%-4)
    else:
        backward_delete_char(event)

    # Reset the history search text
    buffer.history_search_text = None
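# A short sketch (not from the original source) of the `4 + spaces % -4`
# arithmetic above; Python's modulo with a negative divisor deletes back to
# the previous four-space tab stop:
#
#     spaces = 1 -> delete 1 (column 0)    spaces = 6 -> delete 2 (column 4)
#     spaces = 4 -> delete 4 (column 0)    spaces = 9 -> delete 1 (column 8)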
@r.add_binding(Keys.Escape, ' ')
def cycle_spacing(event):
    """
    Based on emacs's cycle-spacing

    On first call, remove all whitespace (if any) from around the cursor and
    replace it with a single space.

    On second call, remove all whitespace.

    On third call, restore the original whitespace and cursor position.
    """
    buffer = event.current_buffer
    # Avoid issues when text grows or shrinks below, keeping the cursor
    # position out of sync
    cursor_position = buffer.cursor_position
    buffer.cursor_position = 0
    buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position)

def do_cycle_spacing(text, cursor_position, state=[]):
    rstripped = text[:cursor_position].rstrip()
    lstripped = text[cursor_position:].lstrip()
    text_before_cursor = text[:cursor_position]

    # The first element of state is the original text. The last element is the
    # buffer text and cursor position as we last left them. If either of those
    # have changed, reset. The state here is global, but that's fine, because
    # we consider any change to be enough to clear the state. The worst that
    # happens here is that we resume when we shouldn't if things look exactly
    # as they did where we left off.
    # TODO: Use event.previous_key_sequence instead.
    if state and state[-1] != (text, cursor_position):
        state.clear()

    if len(state) == 0:
        # Replace all whitespace at the cursor (if any) with a single space.
        state.append((text, cursor_position))
        cursor_position -= len(text_before_cursor) - len(rstripped) - 1
        text = rstripped + ' ' + lstripped
        state.append((text, cursor_position))
    elif len(state) == 2:
        # Exactly one space at the cursor. Remove it.
        cursor_position -= 1
        text = rstripped + lstripped
        state.append((text, cursor_position))
    elif len(state) == 3:
        # Restore original text and cursor position
        text, cursor_position = state[0]
        state.clear()

    if cursor_position < 0:
        cursor_position = 0
    if cursor_position > len(text):
        cursor_position = len(text)
    return text, cursor_position
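# A worked example (not from the original source) of the three-step cycle;
# do_cycle_spacing keeps its state in the mutable default argument, so the
# calls must be consecutive:
#
#     text, pos = do_cycle_spacing('a    b', 3)  # -> ('a b', 2): one space
#     text, pos = do_cycle_spacing(text, pos)    # -> ('ab', 1): no space
#     text, pos = do_cycle_spacing(text, pos)    # -> ('a    b', 3): restored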
@r.add_binding(Keys.ControlX, Keys.ControlO)
def delete_blank_lines(event):
    """
    On blank line, delete all surrounding blank lines, leaving just one.

    On isolated blank line, delete that one.

    On nonblank line, delete any immediately following blank lines.
    """
    buffer = event.app.current_buffer
    document = buffer.document
    lines_up_to_current = document.lines[:document.cursor_position_row+1]
    lines_after_current = document.lines[document.cursor_position_row+1:]

    blank_lines_before = 0
    for line in lines_up_to_current[::-1]:
        if not line.strip():
            blank_lines_before += 1
        else:
            break

    blank_lines_after = 0
    for line in lines_after_current:
        if not line.strip():
            blank_lines_after += 1
        else:
            break

    if not blank_lines_before:
        stripped_before = lines_up_to_current
    else:
        stripped_before = lines_up_to_current[:-blank_lines_before]
    stripped_after = lines_after_current[blank_lines_after:]

    # XXX: Emacs always keeps a newline at the end of the file, but I don't
    # think it matters here.
    if (not blank_lines_before and blank_lines_after) or blank_lines_before + blank_lines_after == 1:
        new_text = '\n'.join(stripped_before + stripped_after)
    elif blank_lines_before + blank_lines_after == 0:
        return
    else:
        buffer.cursor_up(max(blank_lines_before-1, 0))
        new_text = '\n'.join(stripped_before + [''] + stripped_after)

    # Even though we do auto_up, it can be out of bounds from trailing
    # whitespace
    buffer.cursor_position = min(buffer.cursor_position, len(new_text))
    buffer.text = new_text
ANSI_SEQUENCES['\\x1b[1;9A'] = (Keys.Escape, Keys.Up) ANSI_SEQUENCES['\\x1b[1;9B'] = (Keys.Escape, Keys.Down) @r.add_binding(Keys.Escape,", "prompt_toolkit.selection import SelectionState from prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser import", "def back_to_indentation(event): \"\"\" Move back to the beginning of the", "resume when we shouldn't if things look exactly # as", "Remove the lines with no prompt if match.group('ps1prompt') is not", "re.compile('|'.join(ps2_prompts)) PROMPTED_TEXT_RE = re.compile(r'''(?x) # Multiline and verbose (?P<prompt> (?P<ps1prompt>{PS1_PROMPTS_RE.pattern})", "'}') def forward_paragraph(event): \"\"\" Move forward one paragraph of text", "event.current_buffer.cut_selection() self_insert(event) @r.add_binding(Keys.ControlK, filter=HasSelection()) @r.add_binding(Keys.ControlU, filter=HasSelection()) def kill_selection(event): data =", "= event.current_buffer if buffer.document.text_after_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow =", "buffer = event.app.current_buffer document = buffer.document lines_up_to_current = document.lines[:document.cursor_position_row+1] lines_after_current", "select_line_up(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if not buffer.selection_state: buffer.start_selection()", "trigger autocompletion while typing. \"\"\" # Original text & cursor", "@r.add_binding(Keys.Escape, '{') def backward_paragraph(event): \"\"\" Move back one paragraph of", "if pos > cursor_position: word = buffer.document.text[cursor_position:pos] # Don't use", "# Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel for", "import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings,", "= lines_up_to_current else: stripped_before = lines_up_to_current[:-blank_lines_before] stripped_after = lines_after_current[blank_lines_after:] #", "event.current_buffer count = event.arg if buffer.document.cursor_position_row < buffer.document.line_count - 1:", "def upcase_word(event): buffer = event.current_buffer text = buffer.text cursor_position =", "line. Also occurs for unclosed single # quoted strings (which", "False) def redo(event): event.current_buffer.redo() @r.add_binding(Keys.ControlSlash, save_before=lambda e: False) def undo(event):", "stripped from the outputs. Example: >>> split_prompts(''' ... 
# Selection stuff
@r.add_binding(Keys.ShiftLeft)
def select_left(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position -= event.arg

@r.add_binding(Keys.ShiftRight)
def select_right(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position += event.arg

@r.add_binding(Keys.Up)
def auto_up(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row > 0:
        buffer.cursor_up(count=count)
    elif not buffer.selection_state:
        event.key_sequence[-1].accept_next = True
        buffer.history_backward(count=count)
    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None

@r.add_binding(Keys.Down)
def auto_down(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row < buffer.document.line_count - 1:
        buffer.cursor_down(count=count)
    elif not buffer.selection_state:
        buffer.history_forward(count=count)
    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None
@r.add_binding(Keys.ShiftUp)
def select_line_up(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        up_position = buffer.document.get_cursor_up_position()
        buffer.cursor_position += up_position
        if not up_position:
            buffer.cursor_position = 0

@r.add_binding(Keys.ShiftDown)
def select_line_down(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        down_position = buffer.document.get_cursor_down_position()
        buffer.cursor_position += down_position
        if not down_position:
            buffer.cursor_position = len(buffer.document.text)

# The default doesn't toggle correctly
@r.add_binding(Keys.ControlSpace)
def toggle_selection(event):
    buffer = event.current_buffer

    if buffer.selection_state:
        buffer.selection_state = None
    else:
        buffer.start_selection()

@r.add_binding(Keys.ControlX, 'h')
def select_all(event):
    buffer = event.current_buffer

    buffer.selection_state = SelectionState(len(buffer.document.text))
    buffer.cursor_position = 0
@r.add_binding(Keys.Delete, filter=HasSelection())
@r.add_binding(Keys.Backspace, filter=HasSelection())
def delete_selection(event):
    event.current_buffer.cut_selection()

@r.add_binding(Keys.Any, filter=HasSelection())
def self_insert_and_clear_selection(event):
    event.current_buffer.cut_selection()
    self_insert(event)

@r.add_binding(Keys.ControlK, filter=HasSelection())
@r.add_binding(Keys.ControlU, filter=HasSelection())
def kill_selection(event):
    data = event.current_buffer.cut_selection()
    event.app.clipboard.set_data(data)
def system_copy(text):
    if "Linux" in platform.platform():
        copy_command = ['xclip', '-selection', 'c']
    else:
        copy_command = ['pbcopy']

    try:
        # In Python 3.6 we can do this:
        # run(copy_command, input=text, encoding='utf-8', check=True)
        subprocess.run(copy_command, input=text.encode('utf-8'), check=True)
    except FileNotFoundError:
        print("Error: could not find", copy_command[0], file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print(copy_command[0], "error:", e, file=sys.stderr)

def system_paste():
    if "Linux" in platform.platform():
        paste_command = ['xsel', '-b']
    else:
        paste_command = ['pbpaste']

    try:
        # In Python 3.6 we can do this:
        # run(paste_command, input=text, encoding='utf-8')
        p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True)
    except FileNotFoundError:
        print("Error: could not find", paste_command[0], file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print(paste_command[0], "error:", e, file=sys.stderr)
    return p.stdout.decode('utf-8')
@r.add_binding(Keys.ControlX, Keys.ControlW)
def copy_to_clipboard(event):
    if event.current_buffer.document.selection:
        from_, to = event.current_buffer.document.selection_range()
        run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1]))

@r.add_binding(Keys.ControlX, Keys.ControlY)
def paste_from_clipboard(event):
    paste_text_future = run_in_terminal(system_paste)

    event.current_buffer.cut_selection()
    paste_text_future.add_done_callback(lambda future:\
        event.current_buffer.paste_clipboard_data(ClipboardData(future.result())))
# M-[ a b is set to C-S-/ (C-?) in iTerm2 settings
Keys.ControlQuestionmark = "<C-?>"
ALL_KEYS.append("<C-?>")
ANSI_SEQUENCES['\x1b[ab'] = Keys.ControlQuestionmark
Keys.ControlSlash = "<C-/>"
ALL_KEYS.append("<C-/>")
ANSI_SEQUENCES['\x1b"5/'] = Keys.ControlSlash

# This won't work until
# https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is
# merged.
if prompt_toolkit_version[0] != '3':
    @r.add_binding(Keys.ControlQuestionmark, save_before=lambda e: False)
    def redo(event):
        event.current_buffer.redo()

@r.add_binding(Keys.ControlSlash, save_before=lambda e: False)
def undo(event):
    event.current_buffer.undo()
\"\"\" # TODO: Remove the lines with", "'' def split_prompts(text, indent=''): r\"\"\" Takes text copied from mypython,", "find\", copy_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(copy_command[0], \"error:\", e,", "m in BLANK_LINES.finditer(text): if m.start(0) > cursor_position: event.current_buffer.cursor_position = m.start(1)+1", "syntax error, we can't use the CMD_QUEUE (it # breaks", "if buffer.selection_state: buffer.selection_state = None else: buffer.start_selection() @r.add_binding(Keys.ControlX, 'h') def", "# match (because all \\r's # have been stripped from", "= textwrap.indent(lines[0], indent, # Don't indent the first line, it's", "for m in BLANK_LINES.finditer(text[::-1]): if m.start(0) > len(text) - cursor_position:", "positions[0] >= buffer.cursor_position: return p = positions[0] for pos in", "count = event.arg if buffer.document.cursor_position_row < buffer.document.line_count - 1: buffer.cursor_down(count=count)", "[data] event.current_buffer.insert_text(lines[0]) for text in lines[1:]: # TODO: Send last", "col = document.translate_index_to_position(buffer.cursor_position) row += 1 if not inside_string(event.current_buffer.text, row,", "paste_text_future.add_done_callback(lambda future:\\ event.current_buffer.paste_clipboard_data(ClipboardData(future.result()))) # M-[ a b is set to", "'\\n' return '' def split_prompts(text, indent=''): r\"\"\" Takes text copied", "deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted) def insert_text_ovewrite(buffer, data, move_cursor=True): \"\"\" Insert", "(Keys.Escape, Keys.Down) @r.add_binding(Keys.Escape, Keys.Up) def previous_history_search(event): event.key_sequence[-1].accept_next = True buffer", "except subprocess.CalledProcessError as e: print(paste_command[0], \"error:\", e, file=sys.stderr) return p.stdout.decode('utf-8')", "https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel for down because", "on the line must be whitespace because of the #", "event.current_buffer.cursor_position = len(text) - m.end(1) + 1 return event.current_buffer.cursor_position =", "insert_text_ovewrite(buffer, word) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, Keys.ControlF) def forward_sexp(event):", "@r.add_binding(Keys.ControlX, Keys.ControlE) def open_in_editor(event): event.current_buffer.open_in_editor(event.app) @r.add_binding(Keys.ControlX, Keys.ControlS) @r.add_binding(Keys.ControlX, Keys.ControlC) def", "# Exactly one space at the cursor. Remove it. cursor_position", "len(buffer.document.current_line_before_cursor) # Delete up to the tab stop buffer.delete_before_cursor(count=4 +", "multiple blank # lines correctly. 
def prompt_repl(match):
    r"""
    repl function for re.sub for clearing prompts

    Replaces PS1 prompts with \r and removes PS2 prompts.
    """
    # TODO: Remove the lines with no prompt
    if match.group('ps1prompt') is not None:
        return '\r' + match.group('line') + '\n'
    elif match.group('ps2prompt') is not None:
        return match.group('line') + '\n'
    return ''
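# A hedged illustration (not from the original source) of prompt_repl driven
# by PROMPTED_TEXT_RE.sub: PS1 lines gain a leading \r marker (used later to
# split inputs apart) and PS2 continuation prompts are dropped:
#
#     PROMPTED_TEXT_RE.sub(prompt_repl, '>>> x = 1\n... y = 2\n')
#     # -> '\rx = 1\ny = 2\n'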
def split_prompts(text, indent=''):
    r"""
    Takes text copied from a mypython, Python, or IPython session and returns
    a list of inputs

    Outputs are stripped. If no prompts are found the text is left alone.

    The resulting text is indented by indent, except for the first line.

    It is assumed that the text contains no carriage returns (\r).

    Trailing whitespace and newlines are stripped from the outputs.

    Example:

    >>> split_prompts('''
    ... In [1]: a = 1
    ...
    ... In [2]: a
    ... Out[2]: 1
    ...
    ... In [3]: def test():
    ...    ...:     pass
    ...    ...:
    ... ''')
    ['a = 1', 'a', 'def test():\n    pass']
    """
    from .mypython import validate_text
    text = textwrap.dedent(text).strip() + '\n'
    text = textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip()
    lines = text.split('\r')
    # Make sure multilines end in two newlines
    for i, line in enumerate(lines):
        try:
            validate_text(line)
        except SyntaxError:
            # If there is a syntax error, we can't use the CMD_QUEUE (it
            # breaks things).
            lines = ['\n'.join(lines)]
            break
        if '\n' in line.rstrip():
            lines[i] += '\n'

    lines[0] = textwrap.indent(lines[0], indent,
        # Don't indent the first line, it's already indented
        lambda line, _x=[]: bool(_x or _x.append(1)))
    for i in range(1, len(lines)):
        lines[i] = textwrap.indent(lines[i], indent)

    # Extraneous newlines at the end will be stripped by the prompt anyway.
    # This just makes this function easier to test.
    lines = [i.rstrip() for i in lines]
    return lines
@r.add_binding(Keys.BracketedPaste)
def bracketed_paste(event):
    from .mypython import CMD_QUEUE
    data = event.data
    buffer = event.current_buffer

    # Be sure to use \n as line ending.
    # This part is the same as the default binding
    # Some terminals (Like iTerm2) seem to paste \r\n line endings in a
    # bracketed paste. See: https://github.com/ipython/ipython/issues/9737
    data = data.replace('\r\n', '\n')
    data = data.replace('\r', '\n')

    # Replace tabs with four spaces (C-x C-y will still paste the text exactly)
    data = data.replace('\t', '    ')

    # Strip prompts off pasted text
    document = buffer.document
    row, col = document.translate_index_to_position(buffer.cursor_position)
    row += 1
    if not inside_string(event.current_buffer.text, row, col):
        indent = LEADING_WHITESPACE.match(document.current_line_before_cursor)
        current_line_indent = indent.group(1) if indent else ''
        if PS1_PROMPTS_RE.match(data.strip()) or PS2_PROMPTS_RE.match(data.strip()):
            lines = split_prompts(data, current_line_indent)
        else:
            lines = [textwrap.indent(data, current_line_indent,
                # Don't indent the first line, it's already indented
                lambda line, _x=[]: bool(_x or _x.append(1)))]
    else:
        lines = [data]

    event.current_buffer.insert_text(lines[0])
    for text in lines[1:]:
        # TODO: Send last chunk as bracketed paste, so it can be edited
        CMD_QUEUE.append(text)
    if CMD_QUEUE:
        accept_line(event)
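# A tiny sketch (not from the original source) of the paste normalization
# above, using only string methods:
#
#     data = 'a\r\nb\tc\r'
#     data.replace('\r\n', '\n').replace('\r', '\n').replace('\t', '    ')
#     # -> 'a\nb    c\n'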
@r.add_binding(Keys.ControlD)
def exit(event):
    event.app.exit(exception=EOFError, style='class:exiting')

@r.add_binding(Keys.ControlC, filter=~is_searching)
def keyboard_interrupt(event):
    event.app.exit(exception=KeyboardInterrupt, style='class:aborting')

is_returnable = Condition(
    lambda: get_app().current_buffer.is_returnable)

@r.add_binding(Keys.Enter, filter=is_returnable)
def multiline_enter(event):
    """
    When not in multiline, execute. When in multiline, try to intelligently
    add a newline or execute.
    """
    buffer = event.current_buffer
    document = buffer.document
    multiline = document_is_multiline_python(document)

    text_after_cursor = document.text_after_cursor
    text_before_cursor = document.text_before_cursor
    text = buffer.text
    # isspace doesn't respect vacuous truth
    if ((not text_after_cursor or text_after_cursor.isspace())
            and text_before_cursor.replace(' ', '').endswith('\n')):
        # If we are at the end of the buffer, accept unless we are in a
        # docstring
        row, col = document.translate_index_to_position(buffer.cursor_position)
        row += 1
        if multiline and inside_string(text, row, col):
            # We are inside a docstring
            auto_newline(event.current_buffer)
        else:
            accept_line(event)
    elif not multiline:
        # Always accept a single valid line. Also occurs for unclosed single
        # quoted strings (which will give a syntax error)
        accept_line(event)
    else:
        auto_newline(event.current_buffer)

# Always accept the line if the previous key was Up
# Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492.
# We don't need a parallel for down because down is already at the end of the
# prompt.
@r.add_binding(Keys.Enter, filter=is_returnable)
def accept_after_history_backward(event):
    pks = event.previous_key_sequence
    if pks and getattr(pks[-1], 'accept_next', False) and ((len(pks) == 1
            and pks[0].key == "up") or (len(pks) == 2
            and pks[0].key == "escape" and isinstance(pks[1].key, str)
            and pks[1].key in ['p', 'P', 'up', 'down'])):
        accept_line(event)
    else:
        multiline_enter(event)

@r.add_binding(Keys.Escape, Keys.Enter)
@r.add_binding(Keys.Escape, Keys.ControlJ)
def insert_newline(event):
    auto_newline(event.current_buffer)

@r.add_binding(Keys.ControlO)
def open_line(event):
    event.current_buffer.newline(copy_margin=False)
    event.current_buffer.cursor_left()

# M-[ a g is set to S-Enter in iTerm2 settings
Keys.ShiftEnter = "<Shift-Enter>"
ALL_KEYS.append('<Shift-Enter>')
ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter
ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter

if prompt_toolkit_version[0] != '3':
    r.add_binding(Keys.ShiftEnter)(accept_line)

@r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace)
def indent(event):
    """
    When tab should insert whitespace, do that instead of completion.
    """
    # Text before cursor on the line must be whitespace because of the
    # TabShouldInsertWhitespaceFilter.
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4))

LEADING_WHITESPACE = re.compile(r'( *)[^ ]?')

@r.add_binding(Keys.Escape, 'm')
def back_to_indentation(event):
    """
    Move back to the beginning of the line, ignoring whitespace.
    """
    current_line = event.app.current_buffer.document.current_line
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    indent = LEADING_WHITESPACE.search(current_line)
    if indent:
        event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1)

@r.add_binding(Keys.Backspace, save_before=if_no_repeat)
def delete_char_or_unindent(event):
    buffer = event.app.current_buffer
    if buffer.document.current_line_before_cursor.isspace():
        spaces = len(buffer.document.current_line_before_cursor)
        # Delete up to the tab stop
        buffer.delete_before_cursor(count=4 + spaces%-4)
    else:
        backward_delete_char(event)

    # Reset the history search text
    buffer.history_search_text = None
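# Worked example for the unindent arithmetic above (illustrative only): with
# spaces == 6, Python gives 6 % -4 == -2, so count == 4 + (-2) == 2 and the
# cursor moves back to column 4, the previous tab stop. With spaces == 8,
# 8 % -4 == 0 and a full tab stop of 4 spaces is deleted.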
@r.add_binding(Keys.Escape, ' ')
def cycle_spacing(event):
    """
    Based on emacs's cycle-spacing

    On first call, remove all whitespace (if any) from around the cursor and
    replace it with a single space. On second call, remove all whitespace. On
    third call, restore the original whitespace and cursor position.
    """
    buffer = event.app.current_buffer
    # Avoid issues when text grows or shrinks below, keeping the cursor
    # position out of sync
    cursor_position = buffer.cursor_position
    buffer.cursor_position = 0
    buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text, cursor_position)

def do_cycle_spacing(text, cursor_position, state=[]):
    rstripped = text[:cursor_position].rstrip()
    lstripped = text[cursor_position:].lstrip()
    text_before_cursor = text[:cursor_position]

    # The first element of state is the original text. The last element is the
    # buffer text and cursor position as we last left them. If either of those
    # have changed, reset. The state here is global, but that's fine, because
    # we consider any change to be enough to clear the state. The worst that
    # happens here is that we resume when we shouldn't if things look exactly
    # as they did where we left off.
    # TODO: Use event.previous_key_sequence instead.
    if state and state[-1] != (text, cursor_position):
        state.clear()

    if len(state) == 0:
        # Replace all whitespace at the cursor (if any) with a single space.
        state.append((text, cursor_position))
        cursor_position -= len(text_before_cursor) - len(rstripped) - 1
        text = rstripped + ' ' + lstripped
        state.append((text, cursor_position))
    elif len(state) == 2:
        # Remove all whitespace. There is a space at the cursor. Remove it.
        cursor_position -= 1
        text = rstripped + lstripped
        state.append((text, cursor_position))
    elif len(state) == 3:
        # Restore original text and cursor position
        text, cursor_position = state[0]
        state.clear()

    if cursor_position < 0:
        cursor_position = 0
    if cursor_position > len(text):
        cursor_position = len(text)

    return text, cursor_position
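# Illustrative cycle for do_cycle_spacing (not in the original file). Passing
# an explicit list for state keeps the demo self-contained; the binding above
# relies on the shared default list instead:
#
#     >>> s = []
#     >>> do_cycle_spacing('a   b', 3, state=s)   # collapse to one space
#     ('a b', 2)
#     >>> do_cycle_spacing('a b', 2, state=s)     # remove the space
#     ('ab', 1)
#     >>> do_cycle_spacing('ab', 1, state=s)      # restore the original
#     ('a   b', 3)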
@r.add_binding(Keys.ControlX, Keys.ControlO)
def delete_blank_lines(event):
    """
    On blank line, delete all surrounding blank lines, leaving just one. On
    isolated blank line, delete that one. On nonblank line, delete any
    immediately following blank lines.
    """
    buffer = event.app.current_buffer
    document = buffer.document
    lines_up_to_current = document.lines[:document.cursor_position_row+1]
    lines_after_current = document.lines[document.cursor_position_row+1:]

    blank_lines_before = 0
    for line in lines_up_to_current[::-1]:
        if not line.strip():
            blank_lines_before += 1
        else:
            break

    blank_lines_after = 0
    for line in lines_after_current:
        if not line.strip():
            blank_lines_after += 1
        else:
            break

    if not blank_lines_before:
        stripped_before = lines_up_to_current
    else:
        stripped_before = lines_up_to_current[:-blank_lines_before]
    stripped_after = lines_after_current[blank_lines_after:]

    # XXX: Emacs always keeps a newline at the end of the file, but I don't
    # think it matters here.
    if ((not blank_lines_before and blank_lines_after)
            or blank_lines_before + blank_lines_after == 1):
        new_text = '\n'.join(stripped_before + stripped_after)
    elif blank_lines_before + blank_lines_after == 0:
        return
    else:
        buffer.cursor_up(max(blank_lines_before-1, 0))
        new_text = '\n'.join(stripped_before + [''] + stripped_after)

    # Even though we do cursor_up above, the position can be out of bounds
    # from trailing whitespace
    buffer.cursor_position = min(buffer.cursor_position, len(new_text))
    buffer.text = new_text

@r.add_binding(Keys.ControlX, Keys.ControlT)
def transpose_lines(event):
    buffer = event.current_buffer
    document = buffer.document
    row = document.cursor_position_row

    new_lines = document.lines[:]
    if len(new_lines) == 1:
        new_lines.append('')
    if row == 0:
        buffer.cursor_down()
        row += 1
    if row == len(new_lines) - 1:
        new_lines.append('')
    new_lines[row], new_lines[row-1] = new_lines[row-1], new_lines[row]
    buffer.text = '\n'.join(new_lines)
    buffer.cursor_down()
    beginning_of_line(event)

@r.add_binding(Keys.Escape, '<')
def beginning(event):
    """
    Move to the beginning
    """
    event.current_buffer.cursor_position = 0

@r.add_binding(Keys.Escape, '>')
def end(event):
    """
    Move to the end
    """
    event.current_buffer.cursor_position = len(event.current_buffer.text)

# Document.start_of_paragraph/end_of_paragraph don't treat multiple blank
# lines correctly.

# Gives the positions right before one or more blank lines
BLANK_LINES = re.compile(r'\S *(\n *\n)')

@r.add_binding(Keys.Escape, '}')
def forward_paragraph(event):
    """
    Move forward one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position
    for m in BLANK_LINES.finditer(text):
        if m.start(0) > cursor_position:
            event.current_buffer.cursor_position = m.start(1)+1
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, '{')
def backward_paragraph(event):
    """
    Move back one paragraph of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position

    for m in BLANK_LINES.finditer(text[::-1]):
        if m.start(0) > len(text) - cursor_position:
            event.current_buffer.cursor_position = len(text) - m.end(1) + 1
            return
    event.current_buffer.cursor_position = 0

WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)')

@r.add_binding(Keys.Escape, 'f')
@r.add_binding(Keys.Escape, Keys.Right)
def forward_word(event):
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position
    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            event.current_buffer.cursor_position = m.end(0)
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'b')
@r.add_binding(Keys.Escape, Keys.Left)
def backward_word(event):
    """
    Move back one word of text
    """
    text = event.current_buffer.text
    cursor_position = event.current_buffer.cursor_position
    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            event.current_buffer.cursor_position = m.start(0)
            return
    event.current_buffer.cursor_position = 0

@r.add_binding(Keys.Escape, 'd')
def kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    pos = None
    for m in WORD.finditer(text):
        if m.end(0) > cursor_position:
            pos = m.end(0) - cursor_position
            break
    if pos:
        deleted = buffer.delete(count=pos)
        event.app.clipboard.set_text(deleted)

@r.add_binding(Keys.Escape, Keys.Backspace)
def backward_kill_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in reversed(list(WORD.finditer(text))):
        if m.start(0) < cursor_position:
            pos = cursor_position - m.start(0)
            break
    else:
        pos = buffer.cursor_position
    if pos:
        deleted = buffer.delete_before_cursor(count=pos)
        event.app.clipboard.set_text(deleted)
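# Illustrative behavior of the WORD regex above (not in the original file):
# it splits identifiers into sub-words, so word motion stops inside
# camelCase and ALLCAPS runs:
#
#     >>> WORD.findall('fooBarBAZ2')
#     ['foo', 'Bar', 'BAZ', '2']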
\"\"\" # Original text & cursor position.", "Make sure multilines end in two newlines for i, line", "line, _x=[]: bool(_x or _x.append(1))) for i in range(1, len(lines)):", "re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape, 'f') @r.add_binding(Keys.Escape, Keys.Right) def forward_word(event): text = event.current_buffer.text", "> cursor_position: event.current_buffer.cursor_position = m.end(0) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape,", "event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text): if m.start(0) > cursor_position: event.current_buffer.cursor_position", "event.current_buffer.cursor_position = 0 WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape, 'f') @r.add_binding(Keys.Escape, Keys.Right)", "= word[:i] + c.capitalize() + word[i+1:].lower() break insert_text_ovewrite(buffer, word) return", "if not down_position: buffer.cursor_position = len(buffer.document.text) # The default doesn't", "in lines_up_to_current[::-1]: if not line.strip(): blank_lines_before += 1 else: break", "= buffer.text cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): pos", "chunk as bracketed paste, so it can be edited CMD_QUEUE.append(text)", "text and cursor position as we last left them. If", "'3': r.add_binding(Keys.ShiftEnter)(accept_line) @r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace) def indent(event): \"\"\" When tab should", "one paragraph of text \"\"\" text = event.current_buffer.text cursor_position =", "len(state) == 0: # Replace all whitespace at the cursor", "start_line = cursor_line end_line = start_line + 1 # Get", "indented lambda line, _x=[]: bool(_x or _x.append(1))) for i in", "if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text): event.current_buffer.cursor_position += event.arg if", "cursor_position break if pos: deleted = buffer.delete(count=pos) event.app.clipboard.set_text(deleted) @r.add_binding(Keys.Escape, Keys.Backspace)", "e: print(copy_command[0], \"error:\", e, file=sys.stderr) def system_paste(): if \"Linux\" in", "= ['\\n'.join(lines)] break if '\\n' in line.rstrip(): lines[i] += '\\n'", "subprocess import sys import textwrap import platform def get_key_bindings(): #", "positions right before one or more blank lines BLANK_LINES =", "can be out of bounds from trailing # whitespace buffer.cursor_position", "version of prompt-toolkit. ANSI_SEQUENCES['\\x1b[1;9A'] = (Keys.Escape, Keys.Up) ANSI_SEQUENCES['\\x1b[1;9B'] = (Keys.Escape,", "document = buffer.document text = buffer.text row, col = document.translate_index_to_position(buffer.cursor_position)", "on emacs's cycle-spacing On first call, remove all whitespace (if", "rstripped + ' ' + lstripped state.append((text, cursor_position)) elif len(state)", "+= 1 else: break blank_lines_after = 0 for line in", "buffer = event.current_buffer document = buffer.document row = document.cursor_position_row new_lines", "could be # - or _ for i, c in", "= [textwrap.indent(data, current_line_indent, # Don't indent the first line, it's", "def test(): ... ...: pass ... ...: ... 
''') ['a", "opening.start[1]) buffer.cursor_position = new_pos return event.app.output.bell() @r.add_binding(Keys.Left) def left_multiline(event): \"\"\"", "error, we can't use the CMD_QUEUE (it # breaks things).", "# We don't need a parallel for down because down", "if len(new_lines) == 1: new_lines.append('') if row == 0: buffer.cursor_down()", "# Make sure multilines end in two newlines for i,", "newline itself. Just before the line ending, # it should", "(because all \\r's # have been stripped from # the", "@r.add_binding(Keys.Escape, Keys.Enter) @r.add_binding(Keys.Escape, Keys.ControlJ) def insert_newline(event): auto_newline(event.current_buffer) @r.add_binding(Keys.ControlO) def open_line(event):", "'P') @r.add_binding(Keys.Escape, Keys.Down) def forward_history_search(event): event.key_sequence[-1].accept_next = True buffer =", "@r.add_binding(Keys.ShiftLeft) def select_left(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if not", "= len(text) return text, cursor_position @r.add_binding(Keys.ControlX, Keys.ControlO) def delete_blank_lines(event): \"\"\"", "\"\"\" On blank line, delete all surrounding blank lines, leaving", "in a # released version of prompt-toolkit. ANSI_SEQUENCES['\\x1b[1;9A'] = (Keys.Escape,", "document.translate_index_to_position(buffer.cursor_position) row += 1 if not inside_string(event.current_buffer.text, row, col): indent", "= event.current_buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position:", "event.current_buffer.selection_state = None @r.add_binding(Keys.Right) def right_multiline(event): \"\"\" Right that wraps", "the end \"\"\" event.current_buffer.cursor_position = len(event.current_buffer.text) # Document.start_of_paragraph/end_of_paragraph don't treat", "text buffer.history_search_text = None @r.add_binding(Keys.Escape, ' ') def cycle_spacing(event): \"\"\"", "it. 
cursor_position -= 1 text = rstripped + lstripped state.append((text,", "not None: return match.group('line') + '\\n' return '' def split_prompts(text,", ">= 0: event.current_buffer.cursor_position -= event.arg if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state", "buffer.start_selection() buffer.selection_state.shift_arrow = True down_position = buffer.document.get_cursor_down_position() buffer.cursor_position += down_position", "= otext[ocpos:ocpos + len(data)] buffer.text = otext[:ocpos] + data +", "cursor_position: event.current_buffer.cursor_position = len(text) - m.end(1) + 1 return event.current_buffer.cursor_position", "a g is set to S-Enter in iTerm2 settings Keys.ShiftEnter", "None @r.add_binding(Keys.Right) def right_multiline(event): \"\"\" Right that wraps around in", "else: paste_command = ['pbpaste'] try: # In Python 3.6 we", "+ len(overwritten_text):] if move_cursor: buffer.cursor_position += len(data) @r.add_binding(Keys.Escape, 'l') def", "m.end(0) > cursor_position: event.current_buffer.cursor_position = m.end(0) return event.current_buffer.cursor_position = len(text)", "unless we are in a # docstring row, col =", "'{') def backward_paragraph(event): \"\"\" Move back one paragraph of text", "blank lines BLANK_LINES = re.compile(r'\\S *(\\n *\\n)') @r.add_binding(Keys.Escape, '}') def", "need a parallel for down because down is already at", "if start_line <= i < end_line: if uncomment: lines.append(line[:min_indent] +", "import platform def get_key_bindings(): # Based on prompt_toolkit.key_binding.defaults.load_key_bindings() return merge_key_bindings([", "indent = LEADING_WHITESPACE.search(current_line) if indent: event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1)", "+= 1 if not inside_string(event.current_buffer.text, row, col): indent = LEADING_WHITESPACE.match(document.current_line_before_cursor)", "= event.current_buffer document = buffer.document cursor_line, cursor_col = document.translate_index_to_position(document.cursor_position) if", "re import subprocess import sys import textwrap import platform def", "# Original text & cursor position. otext = buffer.text ocpos", "pks[1].key in ['p', 'P', 'up', 'down'])): accept_line(event) else: multiline_enter(event) @r.add_binding(Keys.Escape,", "cursor_position = event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text[::-1]): if m.start(0) >", "# M-[ a b is set to C-S-/ (C-?) in", "break else: pos = buffer.cursor_position if pos: deleted = buffer.delete_before_cursor(count=pos)", "prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings from prompt_toolkit.key_binding.bindings.mouse", "@r.add_binding(Keys.Backspace, save_before=if_no_repeat) def delete_char_or_unindent(event): buffer = event.app.current_buffer if buffer.document.current_line_before_cursor.isspace(): spaces", "cursor_position < 0: cursor_position = 0 if cursor_position > len(text):", "len(buffer.document.text) # The default doesn't toggle correctly @r.add_binding(Keys.ControlSpace) def toggle_selection(event):", "but I don't # think it matters here. if (not", "\"\"\" event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, '>') def end(event): \"\"\" Move", "ocpos = buffer.cursor_position # Don't overwrite the newline itself. 
Just", "= 0 @r.add_binding(Keys.Escape, 'd') def kill_word(event): buffer = event.current_buffer text", "if pos > cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.lower()) return", "stripped by the prompt anyway. # This just makes this", "is the # buffer text and cursor position as we", "= document.translate_index_to_position(document.cursor_position) if document.selection: from_, to = document.selection_range() start_line, start_col", "bool(_x or _x.append(1))) for i in range(1, len(lines)): lines[i] =", "'<') def beginning(event): \"\"\" Move to the beginning \"\"\" event.current_buffer.cursor_position", "= event.current_buffer document = buffer.document multiline = document_is_multiline_python(document) text_after_cursor =", "that wraps around in multiline. \"\"\" if event.current_buffer.cursor_position + event.arg", "Based on prompt_toolkit.key_binding.defaults.load_key_bindings() return merge_key_bindings([ load_basic_bindings(), load_emacs_bindings(), load_emacs_search_bindings(), load_emacs_page_navigation_bindings(), load_mouse_bindings(),", "vacuous truth if (not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ',", "the beginning of the line, ignoring whitespace. \"\"\" current_line =", "paragraph of text \"\"\" text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position", "toggle_selection(event): buffer = event.current_buffer if buffer.selection_state: buffer.selection_state = None else:", "if PS1_PROMPTS_RE.match(data.strip()) or PS2_PROMPTS_RE.match(data.strip()): lines = split_prompts(data, current_line_indent) else: lines", "= document.translate_row_col_to_index(opening.start[0]-1, opening.start[1]) buffer.cursor_position = new_pos return event.app.output.bell() @r.add_binding(Keys.Left) def", "== 1 and pks[0].key == \"up\") or (len(pks) == 2", "in matching: if closing.end == (row, col): new_pos = document.translate_row_col_to_index(opening.start[0]-1,", "lambda line, _x=[]: bool(_x or _x.append(1)))] else: lines = [data]", "split_prompts(''' ... In [1]: a = 1 ... ... In", "= \"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = \"<C-/>\" ALL_KEYS.append(\"<C-/>\")", "can do this: # run(paste_command, input=text, encoding='utf-8') p = subprocess.run(paste_command,", "spaces%-4) else: backward_delete_char(event) # Reset the history search text buffer.history_search_text", "event.current_buffer document = buffer.document cursor_line, cursor_col = document.translate_index_to_position(document.cursor_position) if document.selection:", "emacs's cycle-spacing On first call, remove all whitespace (if any)", "# TODO: Remove the lines with no prompt if match.group('ps1prompt')", "truth if (not text_after_cursor or text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\\n'):", "_x.append(1)))] else: lines = [data] event.current_buffer.insert_text(lines[0]) for text in lines[1:]:", "move_cursor: buffer.cursor_position += len(data) @r.add_binding(Keys.Escape, 'l') def downcase_word(event): buffer =", "can't use the CMD_QUEUE (it # breaks things). 
lines =", "= event.app.current_buffer.document.current_line_before_cursor event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4)) LEADING_WHITESPACE = re.compile(r'( *)[^", "new_lines[row-1], new_lines[row] buffer.text = '\\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) # Selection stuff", "lines BLANK_LINES = re.compile(r'\\S *(\\n *\\n)') @r.add_binding(Keys.Escape, '}') def forward_paragraph(event):", "accept_line(event) else: auto_newline(event.current_buffer) # Always accept the line if the", "def select_right(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if not buffer.selection_state:", "of the line. (?P<noprompt>(?(prompt)\\r|))? # If the prompt is not", "it's already indented lambda line, _x=[]: bool(_x or _x.append(1))) for", "capitalize_word(event): buffer = event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position", "prompts. \"\"\" # TODO: Remove the lines with no prompt", "'c') def capitalize_word(event): buffer = event.current_buffer text = buffer.text cursor_position", "= document.translate_index_to_position(buffer.cursor_position) row += 1 if multiline and inside_string(text, row,", "ignoring whitespace. \"\"\" current_line = event.app.current_buffer.document.current_line before_cursor = event.app.current_buffer.document.current_line_before_cursor indent", "word) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, Keys.ControlF) def forward_sexp(event): buffer", "is global, but that's fine, because # we consider any", "Move back to the beginning of the line, ignoring whitespace.", "issues when text grows or shrinks below, keeping the cursor", "\"shift_arrow\", False): buffer.selection_state = None @r.add_binding(Keys.ShiftUp) def select_line_up(event): buffer =", "# Selection stuff @r.add_binding(Keys.ShiftLeft) def select_left(event): buffer = event.current_buffer if", "emoji + [emoji_pudb]] + [r'In\\ \\[\\d+\\]:\\ '] ps2_prompts = [r'\\", "not multiline: # Always accept a single valid line. Also", "matching: if opening.start == (row, col): new_pos = document.translate_row_col_to_index(closing.end[0]-1, closing.end[1])", "cursor_position = state[0] state.clear() if cursor_position < 0: cursor_position =", "for the whole # line. if m.col != col: continue", "Otherwise it will not # match (because all \\r's #", "if m.end(0) > cursor_position: event.current_buffer.cursor_position = m.end(0) return event.current_buffer.cursor_position =", "current_line = event.app.current_buffer.document.current_line before_cursor = event.app.current_buffer.document.current_line_before_cursor indent = LEADING_WHITESPACE.search(current_line) if", "document = buffer.document multiline = document_is_multiline_python(document) text_after_cursor = document.text_after_cursor text_before_cursor", "backward_paragraph(event): \"\"\" Move back one paragraph of text \"\"\" text", "ending. 
# This part is the same as the default", "input=text, encoding='utf-8', check=True) subprocess.run(copy_command, input=text.encode('utf-8'), check=True) except FileNotFoundError: print(\"Error: could", "matching, mismatching = matching_parens(text) for opening, closing in matching: if", "event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position for m in", "['p', 'P', 'up', 'down'])): accept_line(event) else: multiline_enter(event) @r.add_binding(Keys.Escape, Keys.Enter) @r.add_binding(Keys.Escape,", "two newlines for i, line in enumerate(lines): try: validate_text(line) except", "e, file=sys.stderr) return p.stdout.decode('utf-8') @r.add_binding(Keys.ControlX, Keys.ControlW) def copy_to_clipboard(event): if event.current_buffer.document.selection:", "0: event.current_buffer.cursor_position -= event.arg if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state =", "return event.current_buffer.cursor_position = 0 WORD = re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape, 'f') @r.add_binding(Keys.Escape,", "cursor_position = event.current_buffer.cursor_position for m in reversed(list(WORD.finditer(text))): if m.start(0) <", "event.current_buffer if buffer.selection_state: buffer.selection_state = None else: buffer.start_selection() @r.add_binding(Keys.ControlX, 'h')", "end of the buffer, accept unless we are in a", "= 2*(cursor_line - start_line + 1) if cursor_line >= end_line", "i, c in enumerate(word): if c.isalnum(): word = word[:i] +", "not find\", paste_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(paste_command[0], \"error:\",", "lines = [data] event.current_buffer.insert_text(lines[0]) for text in lines[1:]: # TODO:", "return p = positions[0] for pos in positions: if pos", "prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings from prompt_toolkit.keys", "from prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs", "LEADING_WHITESPACE.match(document.current_line_before_cursor) current_line_indent = indent.group(1) if indent else '' if PS1_PROMPTS_RE.match(data.strip())", "the end of the # prompt. @r.add_binding(Keys.Enter, filter=is_returnable) def accept_after_history_backward(event):", "Emacs always keeps a newline at the end of the", "and removes PS2 prompts. \"\"\" # TODO: Remove the lines", "whitespace and cursor position. \"\"\" buffer = event.app.current_buffer # Avoid", "at cursor position. :param fire_event: Fire `on_text_insert` event. This is", "check=True) except FileNotFoundError: print(\"Error: could not find\", paste_command[0], file=sys.stderr) except", "= data.replace('\\t', ' ') # Strip prompts off pasted text", "def right_multiline(event): \"\"\" Right that wraps around in multiline. 
\"\"\"", "= True buffer.history_backward(count=count) if getattr(buffer.selection_state, \"shift_arrow\", False): buffer.selection_state = None", "prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import", "else: lines.append(line[:min_indent] + '# ' + line[min_indent:]) else: lines.append(line) new_text", "for down because down is already at the end of", "@r.add_binding(Keys.Any, filter=HasSelection()) def self_insert_and_clear_selection(event): event.current_buffer.cut_selection() self_insert(event) @r.add_binding(Keys.ControlK, filter=HasSelection()) @r.add_binding(Keys.ControlU, filter=HasSelection())", "if document.selection: from_, to = document.selection_range() start_line, start_col = document.translate_index_to_position(from_)", "= cursor_position - m.start(0) break else: pos = buffer.cursor_position if", "uncomment = (all(not line.strip() or line[min_indent] == '#' for line", "= len(text) @r.add_binding(Keys.Escape, 'c') def capitalize_word(event): buffer = event.current_buffer text", "don't need a parallel for down because down is already", "the text is left alone. The resulting text is indented", "buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True down_position = buffer.document.get_cursor_down_position() buffer.cursor_position +=", "below ps1_prompts = [r'>>>\\ '] + [re.escape(i) + r'\\[\\d+\\]:\\ '", "list of inputs Outputs are stripped. If no prompts are", "that wraps around in multiline. \"\"\" if event.current_buffer.cursor_position - event.arg", "from prompt_toolkit.key_binding.bindings.named_commands import (accept_line, self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import", "= document.translate_index_to_position(buffer.cursor_position) row += 1 if not inside_string(event.current_buffer.text, row, col):", "= m.end(0) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape, Keys.Left)", "word.capitalize() because the first character could be # - or", "quoted strings (which will give a syntax error) accept_line(event) else:", "@r.add_binding(Keys.Right) def right_multiline(event): \"\"\" Right that wraps around in multiline.", "@r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace) def indent(event): \"\"\" When tab should insert whitespace,", "the end will be stripped by the prompt anyway. #", "= re.compile(r'\\S *(\\n *\\n)') @r.add_binding(Keys.Escape, '}') def forward_paragraph(event): \"\"\" Move", "Python, or IPython session and returns a list of inputs", "load_cpr_bindings from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from prompt_toolkit.key_binding import KeyBindings, merge_key_bindings", "\"\"\" event.current_buffer.cursor_position = len(event.current_buffer.text) # Document.start_of_paragraph/end_of_paragraph don't treat multiple blank", "blank lines. 
\"\"\" buffer = event.app.current_buffer document = buffer.document lines_up_to_current", "new_lines[row] buffer.text = '\\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) # Selection stuff @r.add_binding(Keys.ShiftLeft)", "= event.current_buffer buffer.selection_state = SelectionState(len(buffer.document.text)) buffer.cursor_position = 0 @r.add_binding(Keys.Delete, filter=HasSelection())", "document.lines[start_line:end_line]) and ''.join(document.lines[start_line:end_line]).strip()) lines = [] for i, line in", "event.arg if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state = None @r.add_binding(Keys.Right) def", "paste_command = ['xsel', '-b'] else: paste_command = ['pbpaste'] try: #", "# Replace all whitespace at the cursor (if any) with", "if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True down_position = buffer.document.get_cursor_down_position()", "in iTerm2 settings Keys.ControlQuestionmark = \"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark", "multiline, try to intelligently add a newline or execute. \"\"\"", "closing.end == (row, col): new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1]) buffer.cursor_position =", "return match.group('line') + '\\n' return '' def split_prompts(text, indent=''): r\"\"\"", "used to trigger autocompletion while typing. \"\"\" # Original text", "2*(cursor_line - start_line + 1) if cursor_line >= end_line -", "prompt_toolkit import __version__ as prompt_toolkit_version from .multiline import (auto_newline, tab_should_insert_whitespace,", "data, move_cursor=True): \"\"\" Insert characters at cursor position. :param fire_event:", "to the beginning \"\"\" event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, '>') def", "if pos > cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.upper()) return", "'accept_next', False) and ((len(pks) == 1 and pks[0].key == \"up\")", "if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True buffer.cursor_position -= event.arg", "HANDS SIGN}+\\\\ ?⎢\\\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts)) PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts))", "r.add_binding(Keys.ShiftEnter)(accept_line) @r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace) def indent(event): \"\"\" When tab should insert", "True if not positions or positions[0] >= buffer.cursor_position: return p", "@r.add_binding(Keys.Escape, '>') def end(event): \"\"\" Move to the end \"\"\"", "except FileNotFoundError: print(\"Error: could not find\", paste_command[0], file=sys.stderr) except subprocess.CalledProcessError", "@r.add_binding(Keys.Escape, 'c') def capitalize_word(event): buffer = event.current_buffer text = buffer.text", "[emoji_pudb]] + [r'In\\ \\[\\d+\\]:\\ '] ps2_prompts = [r'\\ *\\.\\.\\.:\\ ?',", "should insert whitespace, do that instead of completion. 
\"\"\" #", "buffer.cursor_position = min(buffer.cursor_position, len(new_text)) buffer.text = new_text @r.add_binding(Keys.ControlX, Keys.ControlT) def", "= [] for (row, col, msg, m) in warnings: #", "accept unless we are in a # docstring row, col", "lines = [textwrap.indent(data, current_line_indent, # Don't indent the first line,", "down_position = buffer.document.get_cursor_down_position() buffer.cursor_position += down_position if not down_position: buffer.cursor_position", "LEADING_WHITESPACE = re.compile(r'( *)[^ ]?') @r.add_binding(Keys.Escape, 'm') def back_to_indentation(event): \"\"\"", "by indent, except for the first line. It is assumed", "in BLANK_LINES.finditer(text): if m.start(0) > cursor_position: event.current_buffer.cursor_position = m.start(1)+1 return", "rstripped = text[:cursor_position].rstrip() lstripped = text[cursor_position:].lstrip() text_before_cursor = text[:cursor_position] #", "matching_parens(text) for opening, closing in matching: if closing.end == (row,", "all whitespace. On third call, restore the original whitespace and", "newlines for i, line in enumerate(lines): try: validate_text(line) except SyntaxError:", "buffer = event.current_buffer text = buffer.text cursor_position = buffer.cursor_position pos", "Out[2]: 1 ... ... In [3]: def test(): ... ...:", "at the cursor. Remove it. cursor_position -= 1 text =", "have been stripped from # the string). (?P<line>.*)\\n # The", "buffer.cursor_position += event.arg @r.add_binding(Keys.Up) def auto_up(event): buffer = event.current_buffer count", "not in multiline, execute. When in multiline, try to intelligently", "None @r.add_binding(Keys.ShiftUp) def select_line_up(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if", "Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel for down", "the history search text buffer.history_search_text = None @r.add_binding(Keys.Escape, ' ')", "lstripped = text[cursor_position:].lstrip() text_before_cursor = text[:cursor_position] # The first element", "row, col): # We are inside a docstring auto_newline(event.current_buffer) else:", "cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.upper()) return event.current_buffer.cursor_position = len(text)", "insert_newline(event): auto_newline(event.current_buffer) @r.add_binding(Keys.ControlO) def open_line(event): event.current_buffer.newline(copy_margin=False) event.current_buffer.cursor_left() # M-[ a", "getattr(buffer.selection_state, \"shift_arrow\", False): buffer.selection_state = None @r.add_binding(Keys.ShiftUp) def select_line_up(event): buffer", "+= '\\n' lines[0] = textwrap.indent(lines[0], indent, # Don't indent the", "for line in document.lines[start_line:end_line]: if not line.strip(): continue indent =", "b is set to C-S-/ (C-?) in iTerm2 settings Keys.ControlQuestionmark", "= buffer.cursor_position # Don't overwrite the newline itself. Just before", "iTerm2 settings Keys.ControlQuestionmark = \"<C-?>\" ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash", "(?P<noprompt>(?(prompt)\\r|))? 
def insert_text_ovewrite(buffer, data, move_cursor=True):
    """
    Insert characters at cursor position.

    :param fire_event: Fire `on_text_insert` event. This is mainly used to
        trigger autocompletion while typing.
    """
    # Original text & cursor position.
    otext = buffer.text
    ocpos = buffer.cursor_position

    # Don't overwrite the newline itself. Just before the line ending,
    # it should act like insert mode.
    overwritten_text = otext[ocpos:ocpos + len(data)]
    buffer.text = otext[:ocpos] + data + otext[ocpos + len(overwritten_text):]

    if move_cursor:
        buffer.cursor_position += len(data)

@r.add_binding(Keys.Escape, 'l')
def downcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.lower())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'u')
def upcase_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            insert_text_ovewrite(buffer, word.upper())
            return
    event.current_buffer.cursor_position = len(text)

@r.add_binding(Keys.Escape, 'c')
def capitalize_word(event):
    buffer = event.current_buffer
    text = buffer.text
    cursor_position = buffer.cursor_position
    for m in WORD.finditer(text):
        pos = m.end(0)
        if pos > cursor_position:
            word = buffer.document.text[cursor_position:pos]
            # Don't use word.capitalize() because the first character could be
            # - or _
            for i, c in enumerate(word):
                if c.isalnum():
                    word = word[:i] + c.capitalize() + word[i+1:].lower()
                    break
            insert_text_ovewrite(buffer, word)
            return
    event.current_buffer.cursor_position = len(text)
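# A runnable sketch (not in the original file) exercising the overwrite
# helper on a bare prompt_toolkit Buffer, outside of any running application.
def _insert_text_ovewrite_example():
    from prompt_toolkit.buffer import Buffer
    b = Buffer()
    b.text = 'hello world'
    b.cursor_position = 0
    insert_text_ovewrite(b, 'HELLO')   # overwrites 'hello' in place
    assert b.text == 'HELLO world'
    assert b.cursor_position == 5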
\"\"\" buffer = event.app.current_buffer # Avoid issues", "= (Keys.Escape, Keys.Up) ANSI_SEQUENCES['\\x1b[1;9B'] = (Keys.Escape, Keys.Down) @r.add_binding(Keys.Escape, Keys.Up) def", "cursor position text, cursor_position = state[0] state.clear() if cursor_position <", "1: new_text = '\\n'.join(stripped_before + stripped_after) elif blank_lines_before + blank_lines_after", "'\\n'.join(lines) # TODO: Set the cursor position correctly n_changed =", "outputs. Example: >>> split_prompts(''' ... In [1]: a = 1", "load_mouse_bindings from prompt_toolkit.key_binding.bindings.cpr import load_cpr_bindings from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from", "pos = buffer.cursor_position if pos: deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted) def", "pass ... ...: ... ''') ['a = 1', 'a', 'def", "match # the empty string. # Otherwise it will not", "except SyntaxError: # If there is a syntax error, we", "do that instead of completion. \"\"\" # Text before cursor", "blank_lines_before + blank_lines_after == 1: new_text = '\\n'.join(stripped_before + stripped_after)", "== 0: return else: buffer.cursor_up(max(blank_lines_before-1, 0)) new_text = '\\n'.join(stripped_before +", "= (Keys.Escape, Keys.Down) @r.add_binding(Keys.Escape, Keys.Up) def previous_history_search(event): event.key_sequence[-1].accept_next = True", "event.current_buffer if buffer.document.text_before_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True", "= LEADING_WHITESPACE.search(current_line) if indent: event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1) @r.add_binding(Keys.Backspace,", "event.app.current_buffer.document.current_line before_cursor = event.app.current_buffer.document.current_line_before_cursor indent = LEADING_WHITESPACE.search(current_line) if indent: event.app.current_buffer.cursor_position", "and cursor position as we last left them. If either", "False) and ((len(pks) == 1 and pks[0].key == \"up\") or", "\"Linux\" in platform.platform(): paste_command = ['xsel', '-b'] else: paste_command =", "+= n_changed @r.add_binding(Keys.ControlX, Keys.ControlE) def open_in_editor(event): event.current_buffer.open_in_editor(event.app) @r.add_binding(Keys.ControlX, Keys.ControlS) @r.add_binding(Keys.ControlX,", "event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, '>') def end(event): \"\"\" Move to", "at the front | (?P<ps2prompt>{PS2_PROMPTS_RE.pattern}))? # of the line. (?P<noprompt>(?(prompt)\\r|))?", "# have changed, reset. The state here is global, but", "False) def undo(event): event.current_buffer.undo() # Need to escape all spaces", "changed, reset. The state here is global, but that's fine,", "# the empty string. # Otherwise it will not #", "transpose_lines(event): buffer = event.current_buffer document = buffer.document row = document.cursor_position_row", "in a # docstring row, col = document.translate_index_to_position(buffer.cursor_position) row +=", "to the beginning of the line, ignoring whitespace. 
\"\"\" current_line", "# If we are at the end of the buffer,", "len(before_cursor)%4)) LEADING_WHITESPACE = re.compile(r'( *)[^ ]?') @r.add_binding(Keys.Escape, 'm') def back_to_indentation(event):", "verbose (x) option below ps1_prompts = [r'>>>\\ '] + [re.escape(i)", "buffer.document.cursor_position_row > 0: buffer.cursor_up(count=count) elif not buffer.selection_state: event.key_sequence[-1].accept_next = True", "m in WORD.finditer(text): if m.end(0) > cursor_position: event.current_buffer.cursor_position = m.end(0)", ".tokenize import inside_string, matching_parens from .theme import emoji, emoji_pudb from", "1', 'a', 'def test():\\n pass'] \"\"\" from .mypython import validate_text", "0 @r.add_binding(Keys.Delete, filter=HasSelection()) @r.add_binding(Keys.Backspace, filter=HasSelection()) def delete_selection(event): event.current_buffer.cut_selection() @r.add_binding(Keys.Any, filter=HasSelection())", "row += 1 if row == len(new_lines) - 1: new_lines.append('')", "of the line, ignoring whitespace. \"\"\" current_line = event.app.current_buffer.document.current_line before_cursor", "def do_cycle_spacing(text, cursor_position, state=[]): rstripped = text[:cursor_position].rstrip() lstripped = text[cursor_position:].lstrip()", "-= 1 text = rstripped + lstripped state.append((text, cursor_position)) elif", "Don't use word.capitalize() because the first character could be #", "run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1])) @r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future = run_in_terminal(system_paste)", "Replace tabs with four spaces (C-x C-y will still paste", "line in lines_after_current: if not line.strip(): blank_lines_after += 1 else:", "r\"\"\" repl function for re.sub for clearing prompts Replaces PS1", "merge_key_bindings from prompt_toolkit.keys import Keys, ALL_KEYS from prompt_toolkit.filters import Condition,", "file, but I don't # think it matters here. if", "Keys.Down) @r.add_binding(Keys.Escape, Keys.Up) def previous_history_search(event): event.key_sequence[-1].accept_next = True buffer =", "here because of verbose (x) option below ps1_prompts = [r'>>>\\", "text_after_cursor.isspace()) and text_before_cursor.replace(' ', '').endswith('\\n'): # If we are at", "# Multiline and verbose (?P<prompt> (?P<ps1prompt>{PS1_PROMPTS_RE.pattern}) # Match prompts at", "repl function for re.sub for clearing prompts Replaces PS1 prompts", "warning for the whole # line. if m.col != col:", "start_line <= i < end_line: if uncomment: lines.append(line[:min_indent] + line[min_indent+2:])", "blank # lines correctly. # Gives the positions right before", "data = data.replace('\\t', ' ') # Strip prompts off pasted", "@r.add_binding(Keys.ShiftRight) def select_right(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if not", "Keys.ControlSlash # This won't work until # https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is #", "text_before_cursor.replace(' ', '').endswith('\\n'): # If we are at the end", "def forward_word(event): text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m", "line. 
# M-[ a g is set to S-Enter in iTerm2 settings
Keys.ShiftEnter = "<Shift-Enter>"
ALL_KEYS.append('<Shift-Enter>')
ANSI_SEQUENCES['\x1b[ag'] = Keys.ShiftEnter
ANSI_SEQUENCES['\x1bOM'] = Keys.ShiftEnter
if prompt_toolkit_version[0] != '3':
    r.add_binding(Keys.ShiftEnter)(accept_line)

@r.add_binding(Keys.Tab, filter=tab_should_insert_whitespace)
def indent(event):
    """
    When tab should insert whitespace, do that instead of completion.
    """
    # Text before cursor on the line must be whitespace because of the
    # TabShouldInsertWhitespaceFilter.
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4))

LEADING_WHITESPACE = re.compile(r'( *)[^ ]?')
@r.add_binding(Keys.Escape, 'm')
def back_to_indentation(event):
    """
    Move back to the beginning of the line, ignoring whitespace.
    """
    current_line = event.app.current_buffer.document.current_line
    before_cursor = event.app.current_buffer.document.current_line_before_cursor
    indent = LEADING_WHITESPACE.search(current_line)
    if indent:
        event.app.current_buffer.cursor_position -= len(before_cursor) - indent.end(1)

@r.add_binding(Keys.Backspace, save_before=if_no_repeat)
def delete_char_or_unindent(event):
    buffer = event.app.current_buffer
    if buffer.document.current_line_before_cursor.isspace():
        spaces = len(buffer.document.current_line_before_cursor)
        # Delete up to the tab stop
        buffer.delete_before_cursor(count=4 + spaces%-4)
    else:
        backward_delete_char(event)

    # Reset the history search text
    buffer.history_search_text = None
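# A quick standalone check (not from the original source) of the tab-stop
# arithmetic used by indent() and the Backspace handler above: Tab pads out
# to the next multiple of four columns, and Backspace on pure leading
# whitespace deletes back to the previous one.
def _tab_stop_example():
    for col, pad in [(0, 4), (1, 3), (4, 4), (6, 2)]:
        assert 4 - col % 4 == pad            # what Tab inserts
    for spaces, deleted in [(1, 1), (4, 4), (6, 2)]:
        assert 4 + spaces % -4 == deleted    # what Backspace removes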
@r.add_binding(Keys.Escape, ' ')
def cycle_spacing(event):
    """
    Based on emacs's cycle-spacing

    On first call, remove all whitespace (if any) from around the cursor and
    replace it with a single space.

    On second call, remove all whitespace.

    On third call, restore the original whitespace and cursor position.
    """
    buffer = event.app.current_buffer
    # Avoid issues when text grows or shrinks below, keeping the cursor
    # position out of sync
    cursor_position = buffer.cursor_position
    buffer.cursor_position = 0
    buffer.text, buffer.cursor_position = do_cycle_spacing(buffer.text,
        cursor_position)

def do_cycle_spacing(text, cursor_position, state=[]):
    rstripped = text[:cursor_position].rstrip()
    lstripped = text[cursor_position:].lstrip()
    text_before_cursor = text[:cursor_position]

    # The first element of state is the original text. The last element is the
    # buffer text and cursor position as we last left them. If either of those
    # have changed, reset. The state here is global, but that's fine, because
    # we consider any change to be enough clear the state. The worst that
    # happens here is that we resume when we shouldn't if things look exactly
    # like they did where we left off.
    # TODO: Use event.previous_key_sequence instead.
    if state and state[-1] != (text, cursor_position):
        state.clear()

    if len(state) == 0:
        # Replace all whitespace at the cursor (if any) with a single space.
        state.append((text, cursor_position))
        cursor_position -= len(text_before_cursor) - len(rstripped) - 1
        text = rstripped + ' ' + lstripped
        state.append((text, cursor_position))
    elif len(state) == 2:
        # Exactly one space at the cursor. Remove it.
        cursor_position -= 1
        text = rstripped + lstripped
        state.append((text, cursor_position))
    elif len(state) == 3:
        # Restore original text and cursor position
        text, cursor_position = state[0]
        state.clear()

    if cursor_position < 0:
        cursor_position = 0
    if cursor_position > len(text):
        cursor_position = len(text)

    return text, cursor_position
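# A standalone illustration (not in the original file) of the three-step
# cycle implemented by do_cycle_spacing(). An explicit state list is passed
# so the example does not disturb the module-level default.
def _cycle_spacing_example():
    state = []
    step1 = do_cycle_spacing('a  b', 2, state)   # -> ('a b', 2): one space
    step2 = do_cycle_spacing(*step1, state)      # -> ('ab', 1): no space
    step3 = do_cycle_spacing(*step2, state)      # -> ('a  b', 2): restored
    assert step1 == ('a b', 2)
    assert step2 == ('ab', 1)
    assert step3 == ('a  b', 2)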
@r.add_binding(Keys.ControlX, Keys.ControlO)
def delete_blank_lines(event):
    """
    On blank line, delete all surrounding blank lines, leaving just one.

    On isolated blank line, delete that one.

    On nonblank line, delete any immediately following blank lines.
    """
    buffer = event.app.current_buffer
    document = buffer.document
    lines_up_to_current = document.lines[:document.cursor_position_row+1]
    lines_after_current = document.lines[document.cursor_position_row+1:]

    blank_lines_before = 0
    for line in lines_up_to_current[::-1]:
        if not line.strip():
            blank_lines_before += 1
        else:
            break

    blank_lines_after = 0
    for line in lines_after_current:
        if not line.strip():
            blank_lines_after += 1
        else:
            break

    if not blank_lines_before:
        stripped_before = lines_up_to_current
    else:
        stripped_before = lines_up_to_current[:-blank_lines_before]
    stripped_after = lines_after_current[blank_lines_after:]

    # XXX: Emacs always keeps a newline at the end of the file, but I don't
    # think it matters here.
    if (not blank_lines_before and blank_lines_after) or \
            blank_lines_before + blank_lines_after == 1:
        new_text = '\n'.join(stripped_before + stripped_after)
    elif blank_lines_before + blank_lines_after == 0:
        return
    else:
        buffer.cursor_up(max(blank_lines_before-1, 0))
        new_text = '\n'.join(stripped_before + [''] + stripped_after)

    # Even though we do auto_up, it can be out of bounds from trailing
    # whitespace
    buffer.cursor_position = min(buffer.cursor_position, len(new_text))
    buffer.text = new_text

@r.add_binding(Keys.ControlX, Keys.ControlT)
def transpose_lines(event):
    buffer = event.current_buffer
    document = buffer.document
    row = document.cursor_position_row
    new_lines = document.lines[:]

    if len(new_lines) == 1:
        new_lines.append('')

    if row == 0:
        buffer.cursor_down()
        row += 1

    if row == len(new_lines) - 1:
        new_lines.append('')

    new_lines[row], new_lines[row-1] = new_lines[row-1], new_lines[row]
    buffer.text = '\n'.join(new_lines)
    buffer.cursor_down()
    beginning_of_line(event)
# Selection stuff
@r.add_binding(Keys.ShiftLeft)
def select_left(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position -= event.arg

@r.add_binding(Keys.ShiftRight)
def select_right(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        buffer.cursor_position += event.arg

@r.add_binding(Keys.Up)
def auto_up(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row > 0:
        buffer.cursor_up(count=count)
    elif not buffer.selection_state:
        event.key_sequence[-1].accept_next = True
        buffer.history_backward(count=count)

    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None

@r.add_binding(Keys.Down)
def auto_down(event):
    buffer = event.current_buffer
    count = event.arg
    if buffer.document.cursor_position_row < buffer.document.line_count - 1:
        buffer.cursor_down(count=count)
    elif not buffer.selection_state:
        buffer.history_forward(count=count)

    if getattr(buffer.selection_state, "shift_arrow", False):
        buffer.selection_state = None

@r.add_binding(Keys.ShiftUp)
def select_line_up(event):
    buffer = event.current_buffer

    if buffer.document.text_before_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        up_position = buffer.document.get_cursor_up_position()
        buffer.cursor_position += up_position
        if not up_position:
            buffer.cursor_position = 0

@r.add_binding(Keys.ShiftDown)
def select_line_down(event):
    buffer = event.current_buffer

    if buffer.document.text_after_cursor:
        if not buffer.selection_state:
            buffer.start_selection()
            buffer.selection_state.shift_arrow = True
        down_position = buffer.document.get_cursor_down_position()
        buffer.cursor_position += down_position
        if not down_position:
            buffer.cursor_position = len(buffer.document.text)

# The default doesn't toggle correctly
@r.add_binding(Keys.ControlSpace)
def toggle_selection(event):
    buffer = event.current_buffer

    if buffer.selection_state:
        buffer.selection_state = None
    else:
        buffer.start_selection()

@r.add_binding(Keys.ControlX, 'h')
def select_all(event):
    buffer = event.current_buffer

    buffer.selection_state = SelectionState(len(buffer.document.text))
    buffer.cursor_position = 0

@r.add_binding(Keys.Delete, filter=HasSelection())
@r.add_binding(Keys.Backspace, filter=HasSelection())
def delete_selection(event):
    event.current_buffer.cut_selection()

@r.add_binding(Keys.Any, filter=HasSelection())
def self_insert_and_clear_selection(event):
    event.current_buffer.cut_selection()
    self_insert(event)

@r.add_binding(Keys.ControlK, filter=HasSelection())
@r.add_binding(Keys.ControlU, filter=HasSelection())
def kill_selection(event):
    data = event.current_buffer.cut_selection()
    event.app.clipboard.set_data(data)
def system_copy(text):
    if "Linux" in platform.platform():
        copy_command = ['xsel', '-b', '-i']
    else:
        copy_command = ['pbcopy']

    try:
        # In Python 3.6 we can do this:
        # run(copy_command, input=text, encoding='utf-8', check=True)
        subprocess.run(copy_command, input=text.encode('utf-8'), check=True)
    except FileNotFoundError:
        print("Error: could not find", copy_command[0], file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print(copy_command[0], "error:", e, file=sys.stderr)

def system_paste():
    if "Linux" in platform.platform():
        paste_command = ['xsel', '-b']
    else:
        paste_command = ['pbpaste']

    try:
        # In Python 3.6 we can do this:
        # run(paste_command, input=text, encoding='utf-8')
        p = subprocess.run(paste_command, stdout=subprocess.PIPE, check=True)
    except FileNotFoundError:
        print("Error: could not find", paste_command[0], file=sys.stderr)
    except subprocess.CalledProcessError as e:
        print(paste_command[0], "error:", e, file=sys.stderr)

    return p.stdout.decode('utf-8')

@r.add_binding(Keys.ControlX, Keys.ControlW)
def copy_to_clipboard(event):
    if event.current_buffer.document.selection:
        from_, to = event.current_buffer.document.selection_range()
        run_in_terminal(lambda: system_copy(
            event.current_buffer.document.text[from_:to + 1]))

@r.add_binding(Keys.ControlX, Keys.ControlY)
def paste_from_clipboard(event):
    paste_text_future = run_in_terminal(system_paste)

    event.current_buffer.cut_selection()
    paste_text_future.add_done_callback(lambda future:
        event.current_buffer.paste_clipboard_data(ClipboardData(future.result())))

# M-[ a b is set to C-S-/ (C-?) in iTerm2 settings
Keys.ControlQuestionmark = "<C-?>"
ALL_KEYS.append("<C-?>")
ANSI_SEQUENCES['\x1b[ab'] = Keys.ControlQuestionmark
Keys.ControlSlash = "<C-/>"
ALL_KEYS.append("<C-/>")
ANSI_SEQUENCES['\x1b"5/'] = Keys.ControlSlash

# This won't work until
# https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is
# merged.
if prompt_toolkit_version[0] != '3':
    @r.add_binding(Keys.ControlQuestionmark, save_before=lambda e: False)
    def redo(event):
        event.current_buffer.redo()

@r.add_binding(Keys.ControlSlash, save_before=lambda e: False)
def undo(event):
    event.current_buffer.undo()
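# A direct, terminal-free way (an illustration only) to exercise the system
# clipboard helpers above. This assumes pbcopy/pbpaste or xsel is installed;
# without them the helpers print an error to stderr instead.
def _clipboard_round_trip_example():
    system_copy('hello from mypython')
    assert system_paste() == 'hello from mypython'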
# Need to escape all spaces here because of verbose (x) option below
ps1_prompts = [r'>>>\ '] + [re.escape(i) + r'\[\d+\]:\ '
    for i, j in emoji + [emoji_pudb]] + [r'In\ \[\d+\]:\ ']
ps2_prompts = [r'\ *\.\.\.:\ ?', r'\.\.\.\ ?', '\N{CLAPPING HANDS SIGN}+\\ ?⎢\\ ?']
PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts))
PS2_PROMPTS_RE = re.compile('|'.join(ps2_prompts))

PROMPTED_TEXT_RE = re.compile(r'''(?x) # Multiline and verbose
    (?P<prompt>
        (?P<ps1prompt>{PS1_PROMPTS_RE.pattern}) # Match prompts at the front
      | (?P<ps2prompt>{PS2_PROMPTS_RE.pattern}))? # of the line.
    (?P<noprompt>(?(prompt)\r|))? # If the prompt is not
                                  # matched, this is a
                                  # marker group that will match
                                  # the empty string.
                                  # Otherwise it will not
                                  # match (because all \r's
                                  # have been stripped from
                                  # the string).
    (?P<line>.*)\n # The actual line.
    '''.format(PS1_PROMPTS_RE=PS1_PROMPTS_RE, PS2_PROMPTS_RE=PS2_PROMPTS_RE))

def prompt_repl(match):
    r"""
    repl function for re.sub for clearing prompts

    Replaces PS1 prompts with \r and removes PS2 prompts.
    """
    # TODO: Remove the lines with no prompt
    if match.group('ps1prompt') is not None:
        return '\r' + match.group('line') + '\n'
    elif match.group('ps2prompt') is not None:
        return match.group('line') + '\n'
    return ''

def split_prompts(text, indent=''):
    r"""
    Takes text copied from a Python or IPython session and returns a list of
    inputs

    Outputs are stripped. If no prompts are found the text is left alone.

    The resulting text is indented by indent, except for the first line.

    It is assumed that the text contains no carriage returns (\r). Trailing
    whitespace and newlines is stripped from the outputs.

    Example:

    >>> split_prompts('''
    ... In [1]: a = 1
    ...
    ... In [2]: a
    ... Out[2]: 1
    ...
    ... In [3]: def test():
    ...    ...:     pass
    ...    ...:
    ... ''')
    ['a = 1', 'a', 'def test():\n    pass']
    """
    from .mypython import validate_text

    text = text + '\n'
    text = textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip()
    lines = text.split('\r')
    # Make sure multilines end in two newlines
    for i, line in enumerate(lines):
        try:
            validate_text(line)
        except SyntaxError:
            # If there is a syntax error, we can't use the CMD_QUEUE (it
            # breaks things).
            lines = ['\n'.join(lines)]
            break
        if '\n' in line.rstrip():
            lines[i] += '\n'
    lines[0] = textwrap.indent(lines[0], indent,
        # Don't indent the first line, it's already indented
        lambda line, _x=[]: bool(_x or _x.append(1)))
    lines = [i.strip() for i in lines]
    return lines
Out[2]: 1", "document.selection: from_, to = document.selection_range() start_line, start_col = document.translate_index_to_position(from_) end_line,", "text in lines[1:]: # TODO: Send last chunk as bracketed", "down_position: buffer.cursor_position = len(buffer.document.text) # The default doesn't toggle correctly", "@r.add_binding(Keys.Escape, Keys.Backspace) def backward_kill_word(event): buffer = event.current_buffer text = buffer.text", "current_line_indent) else: lines = [textwrap.indent(data, current_line_indent, # Don't indent the", "stop buffer.delete_before_cursor(count=4 + spaces%-4) else: backward_delete_char(event) # Reset the history", "+ lstripped state.append((text, cursor_position)) elif len(state) == 2: # Exactly", "@r.add_binding(Keys.ShiftDown) def select_line_down(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if not", "= [r'\\ *\\.\\.\\.:\\ ?', r'\\.\\.\\.\\ ?', '\\N{CLAPPING HANDS SIGN}+\\\\ ?⎢\\\\", "= [] for i, line in enumerate(document.lines): if start_line <=", "don't # think it matters here. if (not blank_lines_before and", "= '\\n'.join(lines) # TODO: Set the cursor position correctly n_changed", "for i, line in enumerate(lines): try: validate_text(line) except SyntaxError: #", "first line. It is assumed that the text contains no", "intelligently add a newline or execute. \"\"\" buffer = event.current_buffer", "data = data.replace('\\r', '\\n') # Replace tabs with four spaces", "1 ... ... In [2]: a ... Out[2]: 1 ...", "out of bounds from trailing # whitespace buffer.cursor_position = min(buffer.cursor_position,", "return else: buffer.cursor_up(max(blank_lines_before-1, 0)) new_text = '\\n'.join(stripped_before + [''] +", "not buffer.selection_state: event.key_sequence[-1].accept_next = True buffer.history_backward(count=count) if getattr(buffer.selection_state, \"shift_arrow\", False):", "text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text[::-1]):", "*\\.\\.\\.:\\ ?', r'\\.\\.\\.\\ ?', '\\N{CLAPPING HANDS SIGN}+\\\\ ?⎢\\\\ ?'] PS1_PROMPTS_RE", "bracketed paste. See: https://github.com/ipython/ipython/issues/9737 data = data.replace('\\r\\n', '\\n') data =", "# Restore original text and cursor position text, cursor_position =", "try to intelligently add a newline or execute. \"\"\" buffer", "buffer.document.text_after_cursor: if not buffer.selection_state: buffer.start_selection() buffer.selection_state.shift_arrow = True down_position =", "all spaces here because of verbose (x) option below ps1_prompts", "stripped_after) # Even though we do auto_up, it can be", "the end of the file, but I don't # think", "in multiline, execute. When in multiline, try to intelligently add", "cursor. Remove it. 
cursor_position -= 1 text = rstripped +", "not positions or positions[-1] <= buffer.cursor_position: return p = positions[-1]", "= state[0] state.clear() if cursor_position < 0: cursor_position = 0", "Need to escape all spaces here because of verbose (x)", "import get_pyflakes_warnings import re import subprocess import sys import textwrap", "prompts off pasted text document = buffer.document row, col =", "buffer.selection_state = None @r.add_binding(Keys.Down) def auto_down(event): buffer = event.current_buffer count", "buffer.text = '\\n'.join(new_lines) buffer.cursor_down() beginning_of_line(event) # Selection stuff @r.add_binding(Keys.ShiftLeft) def", "True event.current_buffer.cursor_position = p @r.add_binding(Keys.Escape, 'n') def next_warning(event): positions =", "get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter, filter=is_returnable) def multiline_enter(event): \"\"\" When not in multiline,", "the # prompt. @r.add_binding(Keys.Enter, filter=is_returnable) def accept_after_history_backward(event): pks = event.previous_key_sequence", "positions @r.add_binding(Keys.Escape, 'p') def previous_warning(event): positions = warning_positions(event) buffer =", "= buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.lower()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'u')", "'] + [re.escape(i) + r'\\[\\d+\\]:\\ ' for i, j in", "# Text before cursor on the line must be whitespace", "textwrap.indent(lines[0], indent, # Don't indent the first line, it's already", "makes this function easier to test. lines = [i.rstrip() for", "load_emacs_page_navigation_bindings(), load_mouse_bindings(), load_cpr_bindings(), custom_key_bindings, ]) r = custom_key_bindings = KeyBindings()", "except for the first line. It is assumed that the", "-= len(text_before_cursor) - len(rstripped) -1 text = rstripped + '", "textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip() lines = text.split('\\r') # Make sure multilines end", "string). (?P<line>.*)\\n # The actual line. '''.format(PS1_PROMPTS_RE=PS1_PROMPTS_RE, PS2_PROMPTS_RE=PS2_PROMPTS_RE)) def prompt_repl(match):", "m.start(0) < cursor_position: pos = cursor_position - m.start(0) break else:", "1 ... ... In [3]: def test(): ... ...: pass", "for m in BLANK_LINES.finditer(text): if m.start(0) > cursor_position: event.current_buffer.cursor_position =", "= document.lines[:document.cursor_position_row+1] lines_after_current = document.lines[document.cursor_position_row+1:] blank_lines_before = 0 for line", "start_col = document.translate_index_to_position(from_) end_line, end_col = document.translate_index_to_position(to - 1) end_line", "exactly # as they did where we left off. #", "in multiline. 
\"\"\" if event.current_buffer.cursor_position - event.arg >= 0: event.current_buffer.cursor_position", "SelectionState(len(buffer.document.text)) buffer.cursor_position = 0 @r.add_binding(Keys.Delete, filter=HasSelection()) @r.add_binding(Keys.Backspace, filter=HasSelection()) def delete_selection(event):", "+ line[min_indent+2:]) else: lines.append(line[:min_indent] + '# ' + line[min_indent:]) else:", "buffer.document.get_cursor_up_position() buffer.cursor_position += up_position if not up_position: buffer.cursor_position = 0", "search text buffer.history_search_text = None @r.add_binding(Keys.Escape, ' ') def cycle_spacing(event):", "as bracketed paste, so it can be edited CMD_QUEUE.append(text) if", ".multiline import (auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from .tokenize import inside_string, matching_parens", "the lines with no prompt if match.group('ps1prompt') is not None:", "'\\n') # Replace tabs with four spaces (C-x C-y will", "treat multiple blank # lines correctly. # Gives the positions", "and ''.join(document.lines[start_line:end_line]).strip()) lines = [] for i, line in enumerate(document.lines):", "event.app.current_buffer.insert_text(' '*(4 - len(before_cursor)%4)) LEADING_WHITESPACE = re.compile(r'( *)[^ ]?') @r.add_binding(Keys.Escape,", "if buffer.document.cursor_position_row < buffer.document.line_count - 1: buffer.cursor_down(count=count) elif not buffer.selection_state:", "carriage returns (\\r). Trailing whitespace and newlines is stripped from", "isspace doesn't respect vacuous truth if (not text_after_cursor or text_after_cursor.isspace())", "= len(event.current_buffer.text) # Document.start_of_paragraph/end_of_paragraph don't treat multiple blank # lines", "insert whitespace, do that instead of completion. \"\"\" # Text", "original whitespace and cursor position. \"\"\" buffer = event.app.current_buffer #", "\"Linux\" in platform.platform(): copy_command = ['xclip', '-selection', 'c'] else: copy_command", "mismatching = matching_parens(text) for opening, closing in matching: if closing.end", "of state is the original text. The last element is", "0 @r.add_binding(Keys.Escape, 'd') def kill_word(event): buffer = event.current_buffer text =", "emoji_pudb from .processors import get_pyflakes_warnings import re import subprocess import", "1 if multiline and inside_string(text, row, col): # We are", "'\\n' lines[0] = textwrap.indent(lines[0], indent, # Don't indent the first", "_x=[]: bool(_x or _x.append(1))) for i in range(1, len(lines)): lines[i]", "str) and pks[1].key in ['p', 'P', 'up', 'down'])): accept_line(event) else:", "warnings: # Handle SyntaxErrorMessage which is the same warning for", "cursor # position out of sync cursor_position = buffer.cursor_position buffer.cursor_position", "def system_paste(): if \"Linux\" in platform.platform(): paste_command = ['xsel', '-b']", "event.current_buffer.document.selection_range() run_in_terminal(lambda:system_copy(event.current_buffer.document.text[from_:to + 1])) @r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future =", "Always accept a single valid line. Also occurs for unclosed", "line, ignoring whitespace. 
\"\"\" current_line = event.app.current_buffer.document.current_line before_cursor = event.app.current_buffer.document.current_line_before_cursor", "subprocess.run(copy_command, input=text.encode('utf-8'), check=True) except FileNotFoundError: print(\"Error: could not find\", copy_command[0],", "1 if row == len(new_lines) - 1: new_lines.append('') new_lines[row], new_lines[row-1]", "if not positions or positions[-1] <= buffer.cursor_position: return p =", "is a syntax error, we can't use the CMD_QUEUE (it", "use word.capitalize() because the first character could be # -", "the empty string. # Otherwise it will not # match", "validate_text(line) except SyntaxError: # If there is a syntax error,", "pos = document.translate_row_col_to_index(row, col) positions.append(pos) return positions @r.add_binding(Keys.Escape, 'p') def", "when we shouldn't if things look exactly # as they", "= event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in BLANK_LINES.finditer(text[::-1]): if", "is that we resume when we shouldn't if things look", "option below ps1_prompts = [r'>>>\\ '] + [re.escape(i) + r'\\[\\d+\\]:\\", "to S-Enter in iTerm2 settings Keys.ShiftEnter = \"<Shift-Enter>\" ALL_KEYS.append('<Shift-Enter>') ANSI_SEQUENCES['\\x1b[ag']", "import get_app from prompt_toolkit.application import run_in_terminal from prompt_toolkit import __version__", "restore the original whitespace and cursor position. \"\"\" buffer =", "closing.end[1]) buffer.cursor_position = new_pos return event.app.output.bell() @r.add_binding(Keys.Escape, Keys.ControlB) def backward_sexp(event):", "text & cursor position. otext = buffer.text ocpos = buffer.cursor_position", "def beginning(event): \"\"\" Move to the beginning \"\"\" event.current_buffer.cursor_position =", "at the cursor (if any) with a single space. state.append((text,", "just one. On isolated blank line, delete that one. 
On", "blank_lines_after = 0 for line in lines_after_current: if not line.strip():", "prompt_toolkit.key_binding.bindings.cpr import load_cpr_bindings from prompt_toolkit.key_binding.bindings.page_navigation import load_emacs_page_navigation_bindings from prompt_toolkit.key_binding import", "FileNotFoundError: print(\"Error: could not find\", paste_command[0], file=sys.stderr) except subprocess.CalledProcessError as", "a parallel for down because down is already at the", "try: validate_text(line) except SyntaxError: # If there is a syntax", "history_search=True) @r.add_binding(Keys.Escape, 'P') @r.add_binding(Keys.Escape, Keys.Down) def forward_history_search(event): event.key_sequence[-1].accept_next = True", "do_cycle_spacing(buffer.text, cursor_position) def do_cycle_spacing(text, cursor_position, state=[]): rstripped = text[:cursor_position].rstrip() lstripped", "from prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser import ANSI_SEQUENCES from prompt_toolkit.application.current", "def keyboard_interrupt(event): event.app.exit(exception=KeyboardInterrupt, style='class:aborting') is_returnable = Condition( lambda: get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter,", "though we do auto_up, it can be out of bounds", "return p = positions[-1] for pos in reversed(positions): if pos", "(if any) from around the cursor and replace it with", "WORD.finditer(text): pos = m.end(0) if pos > cursor_position: word =", "= do_cycle_spacing(buffer.text, cursor_position) def do_cycle_spacing(text, cursor_position, state=[]): rstripped = text[:cursor_position].rstrip()", "> cursor_position: word = buffer.document.text[cursor_position:pos] # Don't use word.capitalize() because", "indent(event): \"\"\" When tab should insert whitespace, do that instead", "if pos: deleted = buffer.delete(count=pos) event.app.clipboard.set_text(deleted) @r.add_binding(Keys.Escape, Keys.Backspace) def backward_kill_word(event):", "= True buffer.cursor_position -= event.arg @r.add_binding(Keys.ShiftRight) def select_right(event): buffer =", "otext[:ocpos] + data + otext[ocpos + len(overwritten_text):] if move_cursor: buffer.cursor_position", "test():\\n pass'] \"\"\" from .mypython import validate_text text = textwrap.dedent(text).strip()", "document.lines[start_line:end_line]: if not line.strip(): continue indent = LEADING_WHITESPACE.search(line) if indent:", "= m.end(0) if pos > cursor_position: word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer,", "the prompt anyway. # This just makes this function easier", "one space at the cursor. Remove it. cursor_position -= 1", "forward_word(event): text = event.current_buffer.text cursor_position = event.current_buffer.cursor_position for m in", "if indent else '' if PS1_PROMPTS_RE.match(data.strip()) or PS2_PROMPTS_RE.match(data.strip()): lines =", "blank_lines_before: stripped_before = lines_up_to_current else: stripped_before = lines_up_to_current[:-blank_lines_before] stripped_after =", "[r'>>>\\ '] + [re.escape(i) + r'\\[\\d+\\]:\\ ' for i, j", "new_lines.append('') if row == 0: buffer.cursor_down() row += 1 if", "< cursor_position: event.current_buffer.cursor_position = m.start(0) return event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape,", "= m.start(0) return event.current_buffer.cursor_position = 0 @r.add_binding(Keys.Escape, 'd') def kill_word(event):", "of those # have changed, reset. 
The state here is", "# Don't indent the first line, it's already indented lambda", "min_indent = 0 uncomment = (all(not line.strip() or line[min_indent] ==", "else: lines = [textwrap.indent(data, current_line_indent, # Don't indent the first", "blank_lines_after) or blank_lines_before + blank_lines_after == 1: new_text = '\\n'.join(stripped_before", "XXX: Emacs always keeps a newline at the end of", "from prompt_toolkit.selection import SelectionState from prompt_toolkit.clipboard import ClipboardData from prompt_toolkit.input.vt100_parser", "clearing prompts Replaces PS1 prompts with \\r and removes PS2", "= re.compile(r'''(?x) # Multiline and verbose (?P<prompt> (?P<ps1prompt>{PS1_PROMPTS_RE.pattern}) # Match", "insert_text_ovewrite(buffer, data, move_cursor=True): \"\"\" Insert characters at cursor position. :param", "immediately following blank lines. \"\"\" buffer = event.app.current_buffer document =", "line, it's already indented lambda line, _x=[]: bool(_x or _x.append(1)))", "end_line, end_col = document.translate_index_to_position(to - 1) end_line += 1 else:", "break if pos: deleted = buffer.delete(count=pos) event.app.clipboard.set_text(deleted) @r.add_binding(Keys.Escape, Keys.Backspace) def", "cycle-spacing On first call, remove all whitespace (if any) from", "!= col: continue pos = document.translate_row_col_to_index(row, col) positions.append(pos) return positions", "or PS2_PROMPTS_RE.match(data.strip()): lines = split_prompts(data, current_line_indent) else: lines = [textwrap.indent(data,", "copied from mypython, Python, or IPython session and returns a", "= positions[-1] for pos in reversed(positions): if pos <= buffer.cursor_position:", "document = buffer.document cursor_line, cursor_col = document.translate_index_to_position(document.cursor_position) if document.selection: from_,", "word = word[:i] + c.capitalize() + word[i+1:].lower() break insert_text_ovewrite(buffer, word)", "if min_indent == 0: break if min_indent == float('inf'): min_indent", "= buffer.cursor_position if pos: deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted) def insert_text_ovewrite(buffer,", "delete_char_or_unindent(event): buffer = event.app.current_buffer if buffer.document.current_line_before_cursor.isspace(): spaces = len(buffer.document.current_line_before_cursor) #", "document = buffer.document lines_up_to_current = document.lines[:document.cursor_position_row+1] lines_after_current = document.lines[document.cursor_position_row+1:] blank_lines_before", "return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'c') def capitalize_word(event): buffer =", "around in multiline. \"\"\" if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text):", "while typing. \"\"\" # Original text & cursor position. otext", "was Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. 
# We don't need a", "if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state = None @r.add_binding(Keys.ControlD) def exit(event):", "event.arg if buffer.document.cursor_position_row > 0: buffer.cursor_up(count=count) elif not buffer.selection_state: event.key_sequence[-1].accept_next", "= len(text) @r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape, Keys.Left) def backward_word(event): \"\"\" Move", "is a special # marker group that will match #", "col): # We are inside a docstring auto_newline(event.current_buffer) else: accept_line(event)", "import load_basic_bindings from prompt_toolkit.key_binding.bindings.emacs import load_emacs_bindings, load_emacs_search_bindings from prompt_toolkit.key_binding.bindings.mouse import", "r'\\.\\.\\.\\ ?', '\\N{CLAPPING HANDS SIGN}+\\\\ ?⎢\\\\ ?'] PS1_PROMPTS_RE = re.compile('|'.join(ps1_prompts))", "return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape, Keys.Left) def backward_word(event):", "= buffer.document multiline = document_is_multiline_python(document) text_after_cursor = document.text_after_cursor text_before_cursor =", "a single space. On second call, remove all whitespace. On", "is the original text. The last element is the #", "from trailing # whitespace buffer.cursor_position = min(buffer.cursor_position, len(new_text)) buffer.text =", "buffer.document.line_count - 1: buffer.cursor_down(count=count) elif not buffer.selection_state: buffer.history_forward(count=count) if getattr(buffer.selection_state,", "are in a # docstring row, col = document.translate_index_to_position(buffer.cursor_position) row", "for pos in reversed(positions): if pos <= buffer.cursor_position: break p", "m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position: event.current_buffer.cursor_position = m.start(0)", "or positions[0] >= buffer.cursor_position: return p = positions[0] for pos", "word = buffer.document.text[cursor_position:pos] insert_text_ovewrite(buffer, word.upper()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape,", "self_insert, backward_delete_char, beginning_of_line) from prompt_toolkit.key_binding.bindings.basic import if_no_repeat from prompt_toolkit.key_binding.bindings.basic import", "ANSI_SEQUENCES['\\x1b[ag'] = Keys.ShiftEnter ANSI_SEQUENCES['\\x1bOM'] = Keys.ShiftEnter if prompt_toolkit_version[0] != '3':", "bracketed paste, so it can be edited CMD_QUEUE.append(text) if CMD_QUEUE:", "single valid line. Also occurs for unclosed single # quoted", "whitespace (if any) from around the cursor and replace it", "cursor_position = event.current_buffer.cursor_position for m in WORD.finditer(text): if m.end(0) >", "# - or _ for i, c in enumerate(word): if", "are stripped. If no prompts are found the text is", "indented by indent, except for the first line. It is", "prompt_toolkit_version from .multiline import (auto_newline, tab_should_insert_whitespace, document_is_multiline_python) from .tokenize import", "we can do this: # run(paste_command, input=text, encoding='utf-8') p =", "= Condition( lambda: get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter, filter=is_returnable) def multiline_enter(event): \"\"\" When", "line in document.lines[start_line:end_line]: if not line.strip(): continue indent = LEADING_WHITESPACE.search(line)", "the newline itself. 
Just before the line ending, # it", "Replace all whitespace at the cursor (if any) with a", "= event.app.current_buffer.document.current_line_before_cursor indent = LEADING_WHITESPACE.search(current_line) if indent: event.app.current_buffer.cursor_position -= len(before_cursor)", "(row, col, msg, m) in warnings: # Handle SyntaxErrorMessage which", "Just before the line ending, # it should act like", "or _x.append(1))) for i in range(1, len(lines)): lines[i] = textwrap.indent(lines[i],", "return '' def split_prompts(text, indent=''): r\"\"\" Takes text copied from", "can be removed once # https://github.com/prompt-toolkit/python-prompt-toolkit/pull/857 is in a #", "for m in reversed(list(WORD.finditer(text))): if m.start(0) < cursor_position: event.current_buffer.cursor_position =", "@r.add_binding(Keys.Escape, ' ') def cycle_spacing(event): \"\"\" Based on emacs's cycle-spacing", "if getattr(buffer.selection_state, \"shift_arrow\", False): buffer.selection_state = None @r.add_binding(Keys.ShiftUp) def select_line_up(event):", "def backward_sexp(event): buffer = event.current_buffer document = buffer.document text =", "indented lambda line, _x=[]: bool(_x or _x.append(1)))] else: lines =", "if prompt_toolkit_version[0] != '3': @r.add_binding(Keys.ControlQuestionmark, save_before=lambda e: False) def redo(event):", "0 @r.add_binding(Keys.ShiftDown) def select_line_down(event): buffer = event.current_buffer if buffer.document.text_after_cursor: if", "\"\"\" Left that wraps around in multiline. \"\"\" if event.current_buffer.cursor_position", "event.current_buffer.cursor_position = m.end(0) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'b') @r.add_binding(Keys.Escape,", "bracketed_paste(event): from .mypython import CMD_QUEUE data = event.data buffer =", "[1]: a = 1 ... ... 
In [2]: a ...", "pos in reversed(positions): if pos <= buffer.cursor_position: break p =", "shouldn't if things look exactly # as they did where", ">= end_line - 1: n_changed -= 2 if uncomment: buffer.cursor_position", "= buffer.text # isspace doesn't respect vacuous truth if (not", "_x=[]: bool(_x or _x.append(1)))] else: lines = [data] event.current_buffer.insert_text(lines[0]) for", "ANSI_SEQUENCES from prompt_toolkit.application.current import get_app from prompt_toolkit.application import run_in_terminal from", "BLANK_LINES.finditer(text[::-1]): if m.start(0) > len(text) - cursor_position: event.current_buffer.cursor_position = len(text)", "= re.compile(r'([a-z0-9]+|[A-Z]{2,}|[a-zA-Z0-9][a-z0-9]*)') @r.add_binding(Keys.Escape, 'f') @r.add_binding(Keys.Escape, Keys.Right) def forward_word(event): text =", "not None: return '\\r' + match.group('line') + '\\n' elif match.group('ps2prompt')", "+= 1 if row == len(new_lines) - 1: new_lines.append('') new_lines[row],", "event.previous_key_sequence if pks and getattr(pks[-1], 'accept_next', False) and ((len(pks) ==", "row == len(new_lines) - 1: new_lines.append('') new_lines[row], new_lines[row-1] = new_lines[row-1],", "get_pyflakes_warnings(document.text, frozenset(event.current_buffer.session._locals)) positions = [] for (row, col, msg, m)", "Keys.Up) def previous_history_search(event): event.key_sequence[-1].accept_next = True buffer = event.current_buffer buffer.history_backward(count=event.arg,", "= get_pyflakes_warnings(document.text, frozenset(event.current_buffer.session._locals)) positions = [] for (row, col, msg,", "document = buffer.document row = document.cursor_position_row new_lines = document.lines[:] if", "system_copy(text): if \"Linux\" in platform.platform(): copy_command = ['xclip', '-selection', 'c']", "This won't work until # https://github.com/jonathanslenders/python-prompt-toolkit/pull/484 is # merged. if", "cursor_col = document.translate_index_to_position(document.cursor_position) if document.selection: from_, to = document.selection_range() start_line,", "start_line + 1 # Get the indentation for the comment", "line[min_indent] == '#' for line in document.lines[start_line:end_line]) and ''.join(document.lines[start_line:end_line]).strip()) lines", "line. It is assumed that the text contains no carriage", "& cursor position. otext = buffer.text ocpos = buffer.cursor_position #", "buffer.cursor_position += len(data) @r.add_binding(Keys.Escape, 'l') def downcase_word(event): buffer = event.current_buffer", "def backward_paragraph(event): \"\"\" Move back one paragraph of text \"\"\"", "buffer.selection_state: buffer.selection_state = None else: buffer.start_selection() @r.add_binding(Keys.ControlX, 'h') def select_all(event):", "| (?P<ps2prompt>{PS2_PROMPTS_RE.pattern}))? # of the line. (?P<noprompt>(?(prompt)\\r|))? 
# If the", "' for i, j in emoji + [emoji_pudb]] + [r'In\\", "\"\"\" When tab should insert whitespace, do that instead of", "+ 1 # Get the indentation for the comment delimiters", "inside_string, matching_parens from .theme import emoji, emoji_pudb from .processors import", "<= len(event.current_buffer.text): event.current_buffer.cursor_position += event.arg if getattr(event.current_buffer.selection_state, \"shift_arrow\", False): event.current_buffer.selection_state", "cursor_position, state=[]): rstripped = text[:cursor_position].rstrip() lstripped = text[cursor_position:].lstrip() text_before_cursor =", "multiline = document_is_multiline_python(document) text_after_cursor = document.text_after_cursor text_before_cursor = document.text_before_cursor text", "auto_up(event): buffer = event.current_buffer count = event.arg if buffer.document.cursor_position_row >", "be removed once # https://github.com/prompt-toolkit/python-prompt-toolkit/pull/857 is in a # released", "\"error:\", e, file=sys.stderr) return p.stdout.decode('utf-8') @r.add_binding(Keys.ControlX, Keys.ControlW) def copy_to_clipboard(event): if", "line endings in a # bracketed paste. See: https://github.com/ipython/ipython/issues/9737 data", "we last left them. If either of those # have", "anyway. # This just makes this function easier to test.", "this function easier to test. lines = [i.rstrip() for i", "# as they did where we left off. # TODO:", "in platform.platform(): copy_command = ['xclip', '-selection', 'c'] else: copy_command =", "insert_text_ovewrite(buffer, word.lower()) return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'u') def upcase_word(event):", "copy_command[0], file=sys.stderr) except subprocess.CalledProcessError as e: print(copy_command[0], \"error:\", e, file=sys.stderr)", "1])) @r.add_binding(Keys.ControlX, Keys.ControlY) def paste_from_clipboard(event): paste_text_future = run_in_terminal(system_paste) event.current_buffer.cut_selection() paste_text_future.add_done_callback(lambda", "pos event.current_buffer._show_syntax_warning = True event.current_buffer.cursor_position = p @r.add_binding(Keys.Escape, 'n') def", "assumed that the text contains no carriage returns (\\r). Trailing", "to intelligently add a newline or execute. 
\"\"\" buffer =", "otext[ocpos + len(overwritten_text):] if move_cursor: buffer.cursor_position += len(data) @r.add_binding(Keys.Escape, 'l')", "+ '\\n' return '' def split_prompts(text, indent=''): r\"\"\" Takes text", "pos = m.end(0) - cursor_position break if pos: deleted =", "accept_line(event) else: multiline_enter(event) @r.add_binding(Keys.Escape, Keys.Enter) @r.add_binding(Keys.Escape, Keys.ControlJ) def insert_newline(event): auto_newline(event.current_buffer)", "docstring auto_newline(event.current_buffer) else: accept_line(event) elif not multiline: # Always accept", "= document.translate_row_col_to_index(row, col) positions.append(pos) return positions @r.add_binding(Keys.Escape, 'p') def previous_warning(event):", "= matching_parens(text) for opening, closing in matching: if closing.end ==", "m.end(0) > cursor_position: pos = m.end(0) - cursor_position break if", "= buffer.document text = buffer.text row, col = document.translate_index_to_position(buffer.cursor_position) row", "text copied from mypython, Python, or IPython session and returns", "if pos: deleted = buffer.delete_before_cursor(count=pos) event.app.clipboard.set_text(deleted) def insert_text_ovewrite(buffer, data, move_cursor=True):", "# Even though we do auto_up, it can be out", "+ spaces%-4) else: backward_delete_char(event) # Reset the history search text", "are at the end of the buffer, accept unless we", "def select_line_up(event): buffer = event.current_buffer if buffer.document.text_before_cursor: if not buffer.selection_state:", "for re.sub for clearing prompts Replaces PS1 prompts with \\r", "i < end_line: if uncomment: lines.append(line[:min_indent] + line[min_indent+2:]) else: lines.append(line[:min_indent]", "return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, 'u') def upcase_word(event): buffer =", "is not None: return '\\r' + match.group('line') + '\\n' elif", "document.translate_row_col_to_index(row, col) positions.append(pos) return positions @r.add_binding(Keys.Escape, 'p') def previous_warning(event): positions", "is_returnable = Condition( lambda: get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter, filter=is_returnable) def multiline_enter(event): \"\"\"", "= new_text buffer.cursor_position += n_changed @r.add_binding(Keys.ControlX, Keys.ControlE) def open_in_editor(event): event.current_buffer.open_in_editor(event.app)", "def backward_kill_word(event): buffer = event.current_buffer text = buffer.text cursor_position =", "range(1, len(lines)): lines[i] = textwrap.indent(lines[i], indent) # Extraneous newlines at", "is set to C-S-/ (C-?) in iTerm2 settings Keys.ControlQuestionmark =", "HasSelection, is_searching from prompt_toolkit.selection import SelectionState from prompt_toolkit.clipboard import ClipboardData", "Condition( lambda: get_app().current_buffer.is_returnable) @r.add_binding(Keys.Enter, filter=is_returnable) def multiline_enter(event): \"\"\" When not", "Up # Requires https://github.com/jonathanslenders/python-prompt-toolkit/pull/492. # We don't need a parallel", "0: cursor_position = 0 if cursor_position > len(text): cursor_position =", "the indentation for the comment delimiters min_indent = float('inf') for", "get_pyflakes_warnings import re import subprocess import sys import textwrap import", "position. 
\"\"\" buffer = event.app.current_buffer # Avoid issues when text", "re.compile(r'( *)[^ ]?') @r.add_binding(Keys.Escape, 'm') def back_to_indentation(event): \"\"\" Move back", "ALL_KEYS.append(\"<C-?>\") ANSI_SEQUENCES['\\x1b[ab'] = Keys.ControlQuestionmark Keys.ControlSlash = \"<C-/>\" ALL_KEYS.append(\"<C-/>\") ANSI_SEQUENCES['\\x1b\"5/'] =", "and cursor position text, cursor_position = state[0] state.clear() if cursor_position", "...: pass ... ...: ... ''') ['a = 1', 'a',", "@r.add_binding(Keys.Escape, 'u') def upcase_word(event): buffer = event.current_buffer text = buffer.text", "= document.lines[document.cursor_position_row+1:] blank_lines_before = 0 for line in lines_up_to_current[::-1]: if", "prompt is not # matched, this is a special #", "= event.current_buffer text = buffer.text cursor_position = event.current_buffer.cursor_position for m", "\\r and removes PS2 prompts. \"\"\" # TODO: Remove the", "= len(text) @r.add_binding(Keys.Escape, 'u') def upcase_word(event): buffer = event.current_buffer text", "continue indent = LEADING_WHITESPACE.search(line) if indent: min_indent = min(min_indent, len(indent.group(1)))", "+ [''] + stripped_after) # Even though we do auto_up,", "@r.add_binding(Keys.Escape, 'l') def downcase_word(event): buffer = event.current_buffer text = buffer.text", "can do this: # run(copy_command, input=text, encoding='utf-8', check=True) subprocess.run(copy_command, input=text.encode('utf-8'),", "# We are inside a docstring auto_newline(event.current_buffer) else: accept_line(event) elif", "def capitalize_word(event): buffer = event.current_buffer text = buffer.text cursor_position =", "for opening, closing in matching: if opening.start == (row, col):", "the original whitespace and cursor position. \"\"\" buffer = event.app.current_buffer", "= True up_position = buffer.document.get_cursor_up_position() buffer.cursor_position += up_position if not", "stripped_before = lines_up_to_current[:-blank_lines_before] stripped_after = lines_after_current[blank_lines_after:] # XXX: Emacs always", "return event.current_buffer.cursor_position = len(text) @r.add_binding(Keys.Escape, Keys.ControlF) def forward_sexp(event): buffer =", "but that's fine, because # we consider any change to", "document.translate_index_to_position(to - 1) end_line += 1 else: start_line = cursor_line", "matters here. if (not blank_lines_before and blank_lines_after) or blank_lines_before +", "None else: buffer.start_selection() @r.add_binding(Keys.ControlX, 'h') def select_all(event): buffer = event.current_buffer", "matched, this is a special # marker group that will", "line in enumerate(document.lines): if start_line <= i < end_line: if", "c.isalnum(): word = word[:i] + c.capitalize() + word[i+1:].lower() break insert_text_ovewrite(buffer,", "= textwrap.dedent(PROMPTED_TEXT_RE.sub(prompt_repl, text)).lstrip() lines = text.split('\\r') # Make sure multilines", "if closing.end == (row, col): new_pos = document.translate_row_col_to_index(opening.start[0]-1, opening.start[1]) buffer.cursor_position", "= buffer.text cursor_position = buffer.cursor_position pos = None for m", "buffer.text cursor_position = buffer.cursor_position pos = None for m in", "save_before=lambda e: False) def redo(event): event.current_buffer.redo() @r.add_binding(Keys.ControlSlash, save_before=lambda e: False)", "execute. 
\"\"\" buffer = event.current_buffer document = buffer.document multiline =", "\"\"\" if event.current_buffer.cursor_position + event.arg <= len(event.current_buffer.text): event.current_buffer.cursor_position += event.arg", "= event.current_buffer document = buffer.document text = buffer.text row, col" ]
[ "remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1) edu_tour_dummy_W=Beta('edu tour dummy", "in other',0,-10,10,0) student_dummy_O=Beta('student dummy in other',0,-10,10,0) worker_dummy_O=Beta('worker dummy in other',0,-10,10,0)", "work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1) edu_tour_dummy_W=Beta('edu tour dummy in work',0,-10,10,1)", "in other',0,-10,10,1) time_window_work=Beta('time available in work',0,-10,10,1) time_window_edu= Beta('time available in", "student_dummy_S=Beta('student dummy in shopping',0,-10,10,1) worker_dummy_S=Beta('worker dummy in shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy", "shopping',0,-10,10,0) public_dummy_S=Beta('PT dummy in shopping',0,-10,10,0) work_tour_dummy_O=Beta('work tour dummy in other',0,-10,10,0)", "loglikelihood import * from statistics import * from nested import", "student_dummy_O*student_dummy+\\ worker_dummy_O*worker_dummy+\\ driver_dummy_O*driver_dummy+\\ passenger_dummy_O*passenger_dummy+\\ public_dummy_O*public_dummy+\\ other_logsum * otherlogsum+\\ time_window_other*time_window_h+\\ tour_distance_other*log(1+distance)+\\", "7am to 9:30am in shopping',0,-10,10,0) a930_a1200_shopping=Beta('period 9:30am to 12pm in", "dummy in other',0,-10,10,0) work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum in", "= MU2 , [1,2,3,4] nests=nest_quit,nest_nonquit prob = nested(V,av,nests,stop_type) #prob =", "first stop of inbound half tour', 0,-10,10,1) second_stop_inbound= Beta('dummy for", "#V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\\ second_stop_inbound*second_stop*first_bound+\\ threeplus_stop_inbound*three_plus_stop*first_bound+\\ first_stop_outbound*first_stop*second_bound+\\ second_stop_outbound*second_stop*second_bound+\\ threeplus_stop_outbound*three_plus_stop*second_bound+\\", "work',0,-10,10,1) shopping_tour_dummy_W=Beta('shopping tour dummy in work',0,-10,10,1) other_tour_dummy_W=Beta('other tour dummy in", "Beta('period 7am to 9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period 9:30am to 12pm", "3pm to 5:30pm in other',0,-10,10,0) p530_p730_other=Beta('period 5:30pm to 7:30 pm", "worker_dummy_E*worker_dummy+\\ driver_dummy_E*driver_dummy+\\ passenger_dummy_E*passenger_dummy+\\ public_dummy_E*public_dummy+\\ edu_logsum * edulogsum+\\ time_window_edu*time_window_h+\\ tour_distance_edu*log(1+distance)+\\ a700_a930_edu*p_700a_930a+\\", "dummy in work',0,-10,10,1) female_dummy_W=Beta('female dummy in work',0,-10,10,0) student_dummy_W=Beta('student dummy in", "in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am to 12pm in other',0,-10,10,0) p300_p530_other=Beta('period 3pm", "to 7am in work',0,-10,10,0) a700_a930_edu= Beta('period 7am to 9:30am in", "in shop',0,-10,10,1) other_logsum=Beta('other logsum in other',0,-10,10,1) time_window_work=Beta('time available in work',0,-10,10,1)", "one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other} av= {0:avail_quit,1:avail_workstop,2:avail_edustop,3:avail_shopstop,4:avail_otherstop} nest_quit = MU1", "of outbound half tour', 0,-10,10,0) second_stop_outbound= Beta('dummy for second stop", "in work',0,-10,10,1) shopping_tour_dummy_W=Beta('shopping tour dummy in work',0,-10,10,1) other_tour_dummy_W=Beta('other tour dummy", "other_tour_dummy_S*1*(tour_type==4)+\\ female_dummy_S*female_dummy+\\ student_dummy_S*student_dummy+\\ worker_dummy_S*worker_dummy+\\ driver_dummy_S*driver_dummy+\\ 
passenger_dummy_S*passenger_dummy+\\ public_dummy_S*public_dummy+\\ shop_logsum * shoplogsum+\\", "exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = \"CFSQP\"", "other',0,-10,10,0) passenger_dummy_O=Beta('passenger dummy in other',0,-10,10,0) public_dummy_O=Beta('PT dummy in other',0,-10,10,0) work_logsum=Beta('work", "shopping_tour_dummy_W*1*(tour_type==3)+\\ other_tour_dummy_W*1*(tour_type==4)+\\ female_dummy_W*female_dummy+\\ student_dummy_W*student_dummy+\\ worker_dummy_W*worker_dummy+\\ driver_dummy_W*driver_dummy+\\ passenger_dummy_W*passenger_dummy+\\ public_dummy_W*public_dummy+\\ work_logsum *", "12pm in shopping',0,-10,10,0) p300_p530_shopping=Beta('period 3pm to 5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period", "tour dummy in edu',0,-10,10,1) other_tour_dummy_E=Beta('other tour dummy in edu',0,-10,10,1) female_dummy_E=Beta('female", "shopping',0,-10,10,0) work_tour_dummy_O=Beta('work tour dummy in other',0,-10,10,0) edu_tour_dummy_O=Beta('edu tour dummy in", "3pm to 5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30 pm", "MU2 , [1,2,3,4] nests=nest_quit,nest_nonquit prob = nested(V,av,nests,stop_type) #prob = bioLogit(V,av,stop_type)", "stop of inbound half tour',0,-10,10,0) threeplus_stop_inbound=Beta('dummy for 3+ stop of", "= Beta('cons for quit',0,-10,10,1) first_stop_inbound= Beta('dummy for first stop of", "student_dummy_E=Beta('student dummy in edu',0,-10,10,1) worker_dummy_E=Beta('worker dummy in edu',0,-10,10,1) driver_dummy_E=Beta('driver dummy", "tour distance in work',0,-10,10,0) tour_distance_edu= Beta('log tour distance in edu',0,-10,10,0)", "stop of outbound half tour', 0,-10,10,0) second_stop_outbound= Beta('dummy for second", "shop_logsum * shoplogsum+\\ time_window_shopping*time_window_h+\\ tour_distance_shopping*log(1+distance)+\\ a700_a930_shopping*p_700a_930a+\\ a930_a1200_shopping*p_930a_1200a+\\ p300_p530_shopping*p_300p_530p+\\ p530_p730_shopping*p_530p_730p+\\ p730_p1000_shopping*p_730p_1000p+\\", "remain dummy',0,-10,10,1) one_tour_remain_Q=Beta('one tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1)", "in work',0,-10,10,0) p1000_a700_work=Beta('period 10pm to 7am in work',0,-10,10,0) a700_a930_edu= Beta('period", "work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to 12pm in work',0,-10,10,0) p300_p530_work=Beta('period 3pm to", "in other',0,-10,10,0) p300_p530_other=Beta('period 3pm to 5:30pm in other',0,-10,10,0) p530_p730_other=Beta('period 5:30pm", "* from statistics import * from nested import * #import", "work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum in edu',0,-10,10,1) shop_logsum=Beta('shop logsum", "in work',0,-10,10,0) a700_a930_edu= Beta('period 7am to 9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period", "to 12pm in other',0,-10,10,0) p300_p530_other=Beta('period 3pm to 5:30pm in other',0,-10,10,0)", "public_dummy_E=Beta('PT dummy in edu',0,-10,10,0) work_tour_dummy_S=Beta('work tour dummy in shopping',0,-10,10,1) edu_tour_dummy_S=Beta('edu", "other',0,-10,10,1) tour_distance_work= Beta('log tour distance in work',0,-10,10,0) tour_distance_edu= Beta('log tour", "7:30 pm in edu',0,-10,10,0) p730_p1000_edu=Beta('period 7:30pm to 10pm in edu',0,-10,10,0)", "9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period 9:30am to 12pm in edu',0,-10,10,0) 
p300_p530_edu=Beta('period", "((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4]", "other',0,-10,10,0) p530_p730_other=Beta('period 5:30pm to 7:30 pm in other',0,-10,10,0) p730_p1000_other=Beta('period 7:30pm", "driver_dummy_W=Beta('driver dummy in work',0,-10,10,0) passenger_dummy_W=Beta('passenger dummy in work',0,-10,10,0) public_dummy_W=Beta('PT dummy", "dummy',0,-10,10,1) one_tour_remain_Q=Beta('one tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work", "p530_p730_edu*p_530p_730p+\\ p730_p1000_edu*p_730p_1000p+\\ p1000_a700_edu*p_1000p_700a #V for shopping V_shopping = cons_shopping+\\ work_tour_dummy_S*1*(tour_type==1)+\\", "= [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = \"CFSQP\" BIOGEME_OBJECT.PARAMETERS['checkDerivatives'] = \"1\"", "cons_work+\\ work_tour_dummy_W*1*(tour_type==1)+\\ edu_tour_dummy_W*1*(tour_type==2)+\\ shopping_tour_dummy_W*1*(tour_type==3)+\\ other_tour_dummy_W*1*(tour_type==4)+\\ female_dummy_W*female_dummy+\\ student_dummy_W*student_dummy+\\ worker_dummy_W*worker_dummy+\\ driver_dummy_W*driver_dummy+\\ passenger_dummy_W*passenger_dummy+\\", "work_tour_dummy_Q=Beta('work tour dummy in quit',0,-10,10,1) edu_tour_dummy_Q=Beta('edu tour dummy in quit',0,-10,10,1)", "education',0,-50,10,0) cons_shopping = Beta('cons for shopping',0,-10,10,0) cons_other = Beta('cons for", "dummy in shopping',0,-10,10,1) shopping_tour_dummy_S=Beta('shopping tour dummy in shopping',0,-10,10,1) other_tour_dummy_S=Beta('other tour", "7:30pm to 10pm in edu',0,-10,10,0) p1000_a700_edu=Beta('period 10pm to 7am in", "threeplus_stop_outbound=Beta('dummy for 3+ stop of outbound half tour',0,-10,10,0) work_tour_dummy_Q=Beta('work tour", "= cons_shopping+\\ work_tour_dummy_S*1*(tour_type==1)+\\ edu_tour_dummy_S*1*(tour_type==2)+\\ shopping_tour_dummy_S*1*(tour_type==3)+\\ other_tour_dummy_S*1*(tour_type==4)+\\ female_dummy_S*female_dummy+\\ student_dummy_S*student_dummy+\\ worker_dummy_S*worker_dummy+\\ driver_dummy_S*driver_dummy+\\", "[0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = \"CFSQP\" BIOGEME_OBJECT.PARAMETERS['checkDerivatives'] = \"1\" BIOGEME_OBJECT.PARAMETERS['numberOfThreads']", "non-quit', 1.0,0,100,1) #V for work V_work= cons_work+\\ work_tour_dummy_W*1*(tour_type==1)+\\ edu_tour_dummy_W*1*(tour_type==2)+\\ shopping_tour_dummy_W*1*(tour_type==3)+\\", "second_stop_outbound*second_stop*second_bound+\\ threeplus_stop_outbound*three_plus_stop*second_bound+\\ work_tour_dummy_Q*1*(tour_type==1)+\\ edu_tour_dummy_Q*1*(tour_type==2)+\\ shopping_tour_dummy_Q*1*(tour_type==3)+\\ other_tour_dummy_Q*1*(tour_type==4)+\\ first_tour_dummy_Q*first_tour_dummy+\\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V", "tour_distance_edu*log(1+distance)+\\ a700_a930_edu*p_700a_930a+\\ a930_a1200_edu*p_930a_1200a+\\ p300_p530_edu*p_300p_530p+\\ p530_p730_edu*p_530p_730p+\\ p730_p1000_edu*p_730p_1000p+\\ p1000_a700_edu*p_1000p_700a #V for shopping", "dummy in quit',0,-10,10,1) first_tour_dummy_Q=Beta('first tour dummy in quit',0,-10,10,0) sub_tour_dummy_Q=Beta('has subtour", "for other V_other=cons_other+\\ 
work_tour_dummy_O*1*(tour_type==1)+\\ edu_tour_dummy_O*1*(tour_type==2)+\\ shopping_tour_dummy_O*1*(tour_type==3)+\\ other_tour_dummy_O*1*(tour_type==4)+\\ female_dummy_O*female_dummy+\\ student_dummy_O*student_dummy+\\ worker_dummy_O*worker_dummy+\\", "V_work= cons_work+\\ work_tour_dummy_W*1*(tour_type==1)+\\ edu_tour_dummy_W*1*(tour_type==2)+\\ shopping_tour_dummy_W*1*(tour_type==3)+\\ other_tour_dummy_W*1*(tour_type==4)+\\ female_dummy_W*female_dummy+\\ student_dummy_W*student_dummy+\\ worker_dummy_W*worker_dummy+\\ driver_dummy_W*driver_dummy+\\", "second stop of outbound half tour',0,-10,10,0) threeplus_stop_outbound=Beta('dummy for 3+ stop", "in shopping',0,-10,10,0) work_tour_dummy_O=Beta('work tour dummy in other',0,-10,10,0) edu_tour_dummy_O=Beta('edu tour dummy", "V_edu = cons_edu+\\ work_tour_dummy_E*1*(tour_type==1)+\\ edu_tour_dummy_E*1*(tour_type==2)+\\ shopping_tour_dummy_E*1*(tour_type==3)+\\ other_tour_dummy_E*1*(tour_type==4)+\\ female_dummy_E*female_dummy+\\ student_dummy_E*student_dummy+\\ worker_dummy_E*worker_dummy+\\", "shopping_tour_dummy_E*1*(tour_type==3)+\\ other_tour_dummy_E*1*(tour_type==4)+\\ female_dummy_E*female_dummy+\\ student_dummy_E*student_dummy+\\ worker_dummy_E*worker_dummy+\\ driver_dummy_E*driver_dummy+\\ passenger_dummy_E*passenger_dummy+\\ public_dummy_E*public_dummy+\\ edu_logsum *", "shop_logsum=Beta('shop logsum in shop',0,-10,10,1) other_logsum=Beta('other logsum in other',0,-10,10,1) time_window_work=Beta('time available", "in shopping',0,-10,10,1) time_window_other= Beta('time available in other',0,-10,10,1) tour_distance_work= Beta('log tour", "passenger_dummy_W*passenger_dummy+\\ public_dummy_W*public_dummy+\\ work_logsum * worklogsum+\\ time_window_work*time_window_h+\\ tour_distance_work*log(1+distance)+\\ a700_a930_work*p_700a_930a+\\ a930_a1200_work*p_930a_1200a+\\ p300_p530_work*p_300p_530p+\\", "BIOGEME_OBJECT.ESTIMATE = Sum(log(prob),'obsIter') exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE =", "nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = \"CFSQP\" BIOGEME_OBJECT.PARAMETERS['checkDerivatives']", "other',0,-10,10,0) shopping_tour_dummy_O=Beta('shopping tour dummy in other',0,-10,10,0) other_tour_dummy_O=Beta('other tour dummy in", "in other',0,-10,10,0) worker_dummy_O=Beta('worker dummy in other',0,-10,10,0) driver_dummy_O=Beta('driver dummy in other',0,-10,10,0)", "for quit',1,0,100,1) MU2 = Beta('MU for non-quit', 1.0,0,100,1) #V for", "tour dummy in quit',0,-10,10,1) shopping_tour_dummy_Q=Beta('shopping tour dummy in quit',0,-10,10,1) other_tour_dummy_Q=Beta('other", "worker_dummy_W=Beta('worker dummy in work',0,-10,10,1) driver_dummy_W=Beta('driver dummy in work',0,-10,10,0) passenger_dummy_W=Beta('passenger dummy", "to 7am in edu',0,-10,10,0) a700_a930_shopping= Beta('period 7am to 9:30am in", "5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to 7:30 pm in edu',0,-10,10,0)", "for quit',0,-10,10,1) first_stop_inbound= Beta('dummy for first stop of inbound half", "edu',0,-10,10,0) passenger_dummy_E=Beta('passenger dummy in edu',0,-10,10,0) public_dummy_E=Beta('PT dummy in edu',0,-10,10,0) work_tour_dummy_S=Beta('work", "edu',0,-10,10,0) p730_p1000_edu=Beta('period 7:30pm to 10pm in edu',0,-10,10,0) p1000_a700_edu=Beta('period 10pm to", "shopping V_shopping = cons_shopping+\\ 
from biogeme import *
from headers import *
from loglikelihood import *
from statistics import *
from nested import *
#import random

# Alternative-specific constants
cons_work = Beta('cons for work',0,-10,10,0)
cons_edu = Beta('cons for education',0,-50,10,0)
cons_shopping = Beta('cons for shopping',0,-10,10,0)
cons_other = Beta('cons for other',0,-10,10,0)
cons_Q = Beta('cons for quit',0,-10,10,1)

# Stop-order dummies, interacted below with the half-tour direction
first_stop_inbound = Beta('dummy for first stop of inbound half tour',0,-10,10,1)
second_stop_inbound = Beta('dummy for second stop of inbound half tour',0,-10,10,0)
threeplus_stop_inbound = Beta('dummy for 3+ stop of inbound half tour',0,-10,10,0)
first_stop_outbound = Beta('dummy for first stop of outbound half tour',0,-10,10,0)
second_stop_outbound = Beta('dummy for second stop of outbound half tour',0,-10,10,0)
threeplus_stop_outbound = Beta('dummy for 3+ stop of outbound half tour',0,-10,10,0)
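# Note on the Beta signature (added for readability; based on the standard
# pythonbiogeme API): Beta(name, start, lower, upper, status) declares a
# parameter with box constraints [lower, upper]; status 0 means the
# parameter is estimated, status 1 means it is fixed at its start value.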
# Tour-type and tour-context dummies for the quit alternative
work_tour_dummy_Q = Beta('work tour dummy in quit',0,-10,10,1)
edu_tour_dummy_Q = Beta('edu tour dummy in quit',0,-10,10,1)
shopping_tour_dummy_Q = Beta('shopping tour dummy in quit',0,-10,10,1)
other_tour_dummy_Q = Beta('other tour dummy in quit',0,-10,10,1)
first_tour_dummy_Q = Beta('first tour dummy in quit',0,-10,10,0)
sub_tour_dummy_Q = Beta('has subtour dummy in quit',0,-10,10,0)
zero_tour_remain_Q = Beta('zero tour remain dummy',0,-10,10,1)
one_tour_remain_Q = Beta('one tour remain dummy',0,-10,10,0)
twoplus_tour_remain_Q = Beta('2+ tour remain dummy',0,-10,10,1)

# Tour-type and person-type dummies for work stops
work_tour_dummy_W = Beta('work tour dummy in work',0,-10,10,1)
edu_tour_dummy_W = Beta('edu tour dummy in work',0,-10,10,1)
shopping_tour_dummy_W = Beta('shopping tour dummy in work',0,-10,10,1)
other_tour_dummy_W = Beta('other tour dummy in work',0,-10,10,1)
female_dummy_W = Beta('female dummy in work',0,-10,10,0)
student_dummy_W = Beta('student dummy in work',0,-10,10,1)
worker_dummy_W = Beta('worker dummy in work',0,-10,10,1)
driver_dummy_W = Beta('driver dummy in work',0,-10,10,0)
passenger_dummy_W = Beta('passenger dummy in work',0,-10,10,0)
public_dummy_W = Beta('PT dummy in work',0,-10,10,0)

# ... for education stops
work_tour_dummy_E = Beta('work tour dummy in edu',0,-10,10,1)
edu_tour_dummy_E = Beta('edu tour dummy in edu',0,-10,10,1)
shopping_tour_dummy_E = Beta('shopping tour dummy in edu',0,-10,10,1)
other_tour_dummy_E = Beta('other tour dummy in edu',0,-10,10,1)
female_dummy_E = Beta('female dummy in edu',0,-10,10,0)
student_dummy_E = Beta('student dummy in edu',0,-10,10,1)
worker_dummy_E = Beta('worker dummy in edu',0,-10,10,1)
driver_dummy_E = Beta('driver dummy in edu',0,-10,10,0)
passenger_dummy_E = Beta('passenger dummy in edu',0,-10,10,0)
public_dummy_E = Beta('PT dummy in edu',0,-10,10,0)

# ... for shopping stops
work_tour_dummy_S = Beta('work tour dummy in shopping',0,-10,10,1)
edu_tour_dummy_S = Beta('edu tour dummy in shopping',0,-10,10,1)
shopping_tour_dummy_S = Beta('shopping tour dummy in shopping',0,-10,10,1)
other_tour_dummy_S = Beta('other tour dummy in shopping',0,-10,10,0)
female_dummy_S = Beta('female dummy in shopping',0,-10,10,0)
student_dummy_S = Beta('student dummy in shopping',0,-10,10,1)
worker_dummy_S = Beta('worker dummy in shopping',0,-10,10,0)
driver_dummy_S = Beta('driver dummy in shopping',0,-10,10,0)
passenger_dummy_S = Beta('passenger dummy in shopping',0,-10,10,0)
public_dummy_S = Beta('PT dummy in shopping',0,-10,10,0)

# ... for other stops
work_tour_dummy_O = Beta('work tour dummy in other',0,-10,10,0)
edu_tour_dummy_O = Beta('edu tour dummy in other',0,-10,10,0)
shopping_tour_dummy_O = Beta('shopping tour dummy in other',0,-10,10,0)
other_tour_dummy_O = Beta('other tour dummy in other',0,-10,10,1)
female_dummy_O = Beta('female dummy in other',0,-10,10,0)
student_dummy_O = Beta('student dummy in other',0,-10,10,0)
worker_dummy_O = Beta('worker dummy in other',0,-10,10,0)
driver_dummy_O = Beta('driver dummy in other',0,-10,10,0)
passenger_dummy_O = Beta('passenger dummy in other',0,-10,10,0)
public_dummy_O = Beta('PT dummy in other',0,-10,10,0)
# Accessibility (logsum), time-window and distance terms
work_logsum = Beta('work logsum in work',0,-10,10,1)
edu_logsum = Beta('edu logsum in edu',0,-10,10,1)
shop_logsum = Beta('shop logsum in shop',0,-10,10,1)
other_logsum = Beta('other logsum in other',0,-10,10,1)

time_window_work = Beta('time available in work',0,-10,10,1)
time_window_edu = Beta('time available in edu',0,-10,10,1)
time_window_shopping = Beta('time available in shopping',0,-10,10,1)
time_window_other = Beta('time available in other',0,-10,10,1)

tour_distance_work = Beta('log tour distance in work',0,-10,10,0)
tour_distance_edu = Beta('log tour distance in edu',0,-10,10,0)
tour_distance_shopping = Beta('log tour distance in shopping',0,-10,10,0)
tour_distance_other = Beta('log tour distance in other',0,-10,10,0)

# Time-of-day dummies
a700_a930_work = Beta('period 7am to 9:30am in work',0,-10,10,0)
a930_a1200_work = Beta('period 9:30am to 12pm in work',0,-10,10,0)
p300_p530_work = Beta('period 3pm to 5:30pm in work',0,-10,10,0)
p530_p730_work = Beta('period 5:30pm to 7:30pm in work',0,-10,10,0)
p730_p1000_work = Beta('period 7:30pm to 10pm in work',0,-10,10,0)
p1000_a700_work = Beta('period 10pm to 7am in work',0,-10,10,0)

a700_a930_edu = Beta('period 7am to 9:30am in edu',0,-10,10,0)
a930_a1200_edu = Beta('period 9:30am to 12pm in edu',0,-10,10,0)
p300_p530_edu = Beta('period 3pm to 5:30pm in edu',0,-10,10,0)
p530_p730_edu = Beta('period 5:30pm to 7:30pm in edu',0,-10,10,0)
p730_p1000_edu = Beta('period 7:30pm to 10pm in edu',0,-10,10,0)
p1000_a700_edu = Beta('period 10pm to 7am in edu',0,-10,10,0)

a700_a930_shopping = Beta('period 7am to 9:30am in shopping',0,-10,10,0)
a930_a1200_shopping = Beta('period 9:30am to 12pm in shopping',0,-10,10,0)
p300_p530_shopping = Beta('period 3pm to 5:30pm in shopping',0,-10,10,0)
p530_p730_shopping = Beta('period 5:30pm to 7:30pm in shopping',0,-10,10,0)
p730_p1000_shopping = Beta('period 7:30pm to 10pm in shopping',0,-10,10,0)
p1000_a700_shopping = Beta('period 10pm to 7am in shopping',0,-10,10,0)

a700_a930_other = Beta('period 7am to 9:30am in other',0,-10,10,0)
a930_a1200_other = Beta('period 9:30am to 12pm in other',0,-10,10,0)
p300_p530_other = Beta('period 3pm to 5:30pm in other',0,-10,10,0)
p530_p730_other = Beta('period 5:30pm to 7:30pm in other',0,-10,10,0)
p730_p1000_other = Beta('period 7:30pm to 10pm in other',0,-10,10,0)
p1000_a700_other = Beta('period 10pm to 7am in other',0,-10,10,0)

# Nest scale parameters
MU1 = Beta('MU for quit',1,0,100,1)
MU2 = Beta('MU for non-quit',1.0,0,100,1)
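# Both scale parameters are declared with status 1, i.e. fixed at 1.0. The
# quit nest is degenerate (a single alternative), so MU1 must stay at 1; and
# as long as MU2 is also held at 1, the nested model below coincides with
# the plain MNL kept in the commented-out bioLogit line further down.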
#V for work
V_work = cons_work+\
         work_tour_dummy_W*1*(tour_type==1)+\
         edu_tour_dummy_W*1*(tour_type==2)+\
         shopping_tour_dummy_W*1*(tour_type==3)+\
         other_tour_dummy_W*1*(tour_type==4)+\
         female_dummy_W*female_dummy+\
         student_dummy_W*student_dummy+\
         worker_dummy_W*worker_dummy+\
         driver_dummy_W*driver_dummy+\
         passenger_dummy_W*passenger_dummy+\
         public_dummy_W*public_dummy+\
         work_logsum*worklogsum+\
         time_window_work*time_window_h+\
         tour_distance_work*log(1+distance)+\
         a700_a930_work*p_700a_930a+\
         a930_a1200_work*p_930a_1200a+\
         p300_p530_work*p_300p_530p+\
         p530_p730_work*p_530p_730p+\
         p730_p1000_work*p_730p_1000p+\
         p1000_a700_work*p_1000p_700a

#V for education
V_edu = cons_edu+\
        work_tour_dummy_E*1*(tour_type==1)+\
        edu_tour_dummy_E*1*(tour_type==2)+\
        shopping_tour_dummy_E*1*(tour_type==3)+\
        other_tour_dummy_E*1*(tour_type==4)+\
        female_dummy_E*female_dummy+\
        student_dummy_E*student_dummy+\
        worker_dummy_E*worker_dummy+\
        driver_dummy_E*driver_dummy+\
        passenger_dummy_E*passenger_dummy+\
        public_dummy_E*public_dummy+\
        edu_logsum*edulogsum+\
        time_window_edu*time_window_h+\
        tour_distance_edu*log(1+distance)+\
        a700_a930_edu*p_700a_930a+\
        a930_a1200_edu*p_930a_1200a+\
        p300_p530_edu*p_300p_530p+\
        p530_p730_edu*p_530p_730p+\
        p730_p1000_edu*p_730p_1000p+\
        p1000_a700_edu*p_1000p_700a

#V for shopping
V_shopping = cons_shopping+\
             work_tour_dummy_S*1*(tour_type==1)+\
             edu_tour_dummy_S*1*(tour_type==2)+\
             shopping_tour_dummy_S*1*(tour_type==3)+\
             other_tour_dummy_S*1*(tour_type==4)+\
             female_dummy_S*female_dummy+\
             student_dummy_S*student_dummy+\
             worker_dummy_S*worker_dummy+\
             driver_dummy_S*driver_dummy+\
             passenger_dummy_S*passenger_dummy+\
             public_dummy_S*public_dummy+\
             shop_logsum*shoplogsum+\
             time_window_shopping*time_window_h+\
             tour_distance_shopping*log(1+distance)+\
             a700_a930_shopping*p_700a_930a+\
             a930_a1200_shopping*p_930a_1200a+\
             p300_p530_shopping*p_300p_530p+\
             p530_p730_shopping*p_530p_730p+\
             p730_p1000_shopping*p_730p_1000p+\
             p1000_a700_shopping*p_1000p_700a
#V for other
V_other = cons_other+\
          work_tour_dummy_O*1*(tour_type==1)+\
          edu_tour_dummy_O*1*(tour_type==2)+\
          shopping_tour_dummy_O*1*(tour_type==3)+\
          other_tour_dummy_O*1*(tour_type==4)+\
          female_dummy_O*female_dummy+\
          student_dummy_O*student_dummy+\
          worker_dummy_O*worker_dummy+\
          driver_dummy_O*driver_dummy+\
          passenger_dummy_O*passenger_dummy+\
          public_dummy_O*public_dummy+\
          other_logsum*otherlogsum+\
          time_window_other*time_window_h+\
          tour_distance_other*log(1+distance)+\
          a700_a930_other*p_700a_930a+\
          a930_a1200_other*p_930a_1200a+\
          p300_p530_other*p_300p_530p+\
          p530_p730_other*p_530p_730p+\
          p730_p1000_other*p_730p_1000p+\
          p1000_a700_other*p_1000p_700a

#V for quit
V_quit = cons_Q+\
         first_stop_inbound*first_stop*first_bound+\
         second_stop_inbound*second_stop*first_bound+\
         threeplus_stop_inbound*three_plus_stop*first_bound+\
         first_stop_outbound*first_stop*second_bound+\
         second_stop_outbound*second_stop*second_bound+\
         threeplus_stop_outbound*three_plus_stop*second_bound+\
         work_tour_dummy_Q*1*(tour_type==1)+\
         edu_tour_dummy_Q*1*(tour_type==2)+\
         shopping_tour_dummy_Q*1*(tour_type==3)+\
         other_tour_dummy_Q*1*(tour_type==4)+\
         first_tour_dummy_Q*first_tour_dummy+\
         sub_tour_dummy_Q*has_subtour+\
         zero_tour_remain_Q*1*(tour_remain==0)+\
         one_tour_remain_Q*1*(tour_remain==1)+\
         twoplus_tour_remain_Q*1*(tour_remain>=2)
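# Choice set: 0 = quit (no further stop), 1 = work stop, 2 = education stop,
# 3 = shopping stop, 4 = other stop. Quit sits alone in one nest; the four
# stop purposes share the other.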
V = {0: V_quit, 1: V_work, 2: V_edu, 3: V_shopping, 4: V_other}
av = {0: avail_quit, 1: avail_workstop, 2: avail_edustop, 3: avail_shopstop, 4: avail_otherstop}

nest_quit = MU1, [0]
nest_nonquit = MU2, [1, 2, 3, 4]
nests = nest_quit, nest_nonquit

prob = nested(V, av, nests, stop_type)
#prob = bioLogit(V,av,stop_type)

# Maximum-likelihood estimation over all observations
rowIterator('obsIter')
BIOGEME_OBJECT.ESTIMATE = Sum(log(prob), 'obsIter')

# Drop observations with availability violations, missing zones, or an
# implausibly large remaining time window
exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0
BIOGEME_OBJECT.EXCLUDE = exclude

nullLoglikelihood(av, 'obsIter')
choiceSet = [0, 1, 2, 3, 4]
cteLoglikelihood(choiceSet, stop_type, 'obsIter')
availabilityStatistics(av, 'obsIter')

BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm'] = "CFSQP"
BIOGEME_OBJECT.PARAMETERS['checkDerivatives'] = "1"
BIOGEME_OBJECT.PARAMETERS['numberOfThreads'] = "6"
"zero_tour_remain_Q=Beta('zero tour remain dummy',0,-10,10,1) one_tour_remain_Q=Beta('one tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour", "edu_tour_dummy_Q*1*(tour_type==2)+\\ shopping_tour_dummy_Q*1*(tour_type==3)+\\ other_tour_dummy_Q*1*(tour_type==4)+\\ first_tour_dummy_Q*first_tour_dummy+\\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other}", "* edulogsum+\\ time_window_edu*time_window_h+\\ tour_distance_edu*log(1+distance)+\\ a700_a930_edu*p_700a_930a+\\ a930_a1200_edu*p_930a_1200a+\\ p300_p530_edu*p_300p_530p+\\ p530_p730_edu*p_530p_730p+\\ p730_p1000_edu*p_730p_1000p+\\ p1000_a700_edu*p_1000p_700a", "in edu',0,-10,10,1) female_dummy_E=Beta('female dummy in edu',0,-10,10,0) student_dummy_E=Beta('student dummy in edu',0,-10,10,1)", "work_tour_dummy_S*1*(tour_type==1)+\\ edu_tour_dummy_S*1*(tour_type==2)+\\ shopping_tour_dummy_S*1*(tour_type==3)+\\ other_tour_dummy_S*1*(tour_type==4)+\\ female_dummy_S*female_dummy+\\ student_dummy_S*student_dummy+\\ worker_dummy_S*worker_dummy+\\ driver_dummy_S*driver_dummy+\\ passenger_dummy_S*passenger_dummy+\\ public_dummy_S*public_dummy+\\", "to 10pm in work',0,-10,10,0) p1000_a700_work=Beta('period 10pm to 7am in work',0,-10,10,0)", "BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter') availabilityStatistics(av,'obsIter') BIOGEME_OBJECT.PARAMETERS['optimizationAlgorithm']", "a700_a930_work*p_700a_930a+\\ a930_a1200_work*p_930a_1200a+\\ p300_p530_work*p_300p_530p+\\ p530_p730_work*p_530p_730p+\\ p730_p1000_work*p_730p_1000p+\\ p1000_a700_work*p_1000p_700a #V for education V_edu", "tour dummy in other',0,-10,10,0) other_tour_dummy_O=Beta('other tour dummy in other',0,-10,10,1) female_dummy_O=Beta('female", "a700_a930_shopping= Beta('period 7am to 9:30am in shopping',0,-10,10,0) a930_a1200_shopping=Beta('period 9:30am to", "5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30 pm in shopping',0,-10,10,0)", "dummy in shopping',0,-10,10,1) other_tour_dummy_S=Beta('other tour dummy in shopping',0,-10,10,0) female_dummy_S=Beta('female dummy", "7am to 9:30am in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am to 12pm in", "7am in edu',0,-10,10,0) a700_a930_shopping= Beta('period 7am to 9:30am in shopping',0,-10,10,0)", "tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1) edu_tour_dummy_W=Beta('edu tour", "passenger_dummy_O=Beta('passenger dummy in other',0,-10,10,0) public_dummy_O=Beta('PT dummy in other',0,-10,10,0) work_logsum=Beta('work logsum", "tour remain dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy", "other',0,-10,10,1) female_dummy_O=Beta('female dummy in other',0,-10,10,0) student_dummy_O=Beta('student dummy in other',0,-10,10,0) worker_dummy_O=Beta('worker", "other_tour_dummy_Q*1*(tour_type==4)+\\ first_tour_dummy_Q*first_tour_dummy+\\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other} av= {0:avail_quit,1:avail_workstop,2:avail_edustop,3:avail_shopstop,4:avail_otherstop}", "half tour',0,-10,10,0) work_tour_dummy_Q=Beta('work tour dummy in quit',0,-10,10,1) edu_tour_dummy_Q=Beta('edu tour dummy", 
"> 0 BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter') choiceSet = [0,1,2,3,4] cteLoglikelihood(choiceSet,stop_type,'obsIter')", "p300_p530_other*p_300p_530p+\\ p530_p730_other*p_530p_730p+\\ p730_p1000_other*p_730p_1000p+\\ p1000_a700_other*p_1000p_700a #V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\\ second_stop_inbound*second_stop*first_bound+\\", "driver_dummy_S*driver_dummy+\\ passenger_dummy_S*passenger_dummy+\\ public_dummy_S*public_dummy+\\ shop_logsum * shoplogsum+\\ time_window_shopping*time_window_h+\\ tour_distance_shopping*log(1+distance)+\\ a700_a930_shopping*p_700a_930a+\\ a930_a1200_shopping*p_930a_1200a+\\", "* from nested import * #import random cons_work= Beta('cons for", "nested(V,av,nests,stop_type) #prob = bioLogit(V,av,stop_type) rowIterator('obsIter') BIOGEME_OBJECT.ESTIMATE = Sum(log(prob),'obsIter') exclude =", "to 7am in other',0,-10,10,0) MU1 = Beta('MU for quit',1,0,100,1) MU2", "in work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to 12pm in work',0,-10,10,0) p300_p530_work=Beta('period 3pm", "dummy in quit',0,-10,10,1) edu_tour_dummy_Q=Beta('edu tour dummy in quit',0,-10,10,1) shopping_tour_dummy_Q=Beta('shopping tour", "public_dummy_O=Beta('PT dummy in other',0,-10,10,0) work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum", "dummy in work',0,-10,10,1) worker_dummy_W=Beta('worker dummy in work',0,-10,10,1) driver_dummy_W=Beta('driver dummy in", "p1000_a700_work=Beta('period 10pm to 7am in work',0,-10,10,0) a700_a930_edu= Beta('period 7am to", "edu_tour_dummy_O=Beta('edu tour dummy in other',0,-10,10,0) shopping_tour_dummy_O=Beta('shopping tour dummy in other',0,-10,10,0)", "in edu',0,-10,10,0) p730_p1000_edu=Beta('period 7:30pm to 10pm in edu',0,-10,10,0) p1000_a700_edu=Beta('period 10pm", "tour distance in other',0,-10,10,0) a700_a930_work= Beta('period 7am to 9:30am in", "7:30 pm in shopping',0,-10,10,0) p730_p1000_shopping=Beta('period 7:30pm to 10pm in shopping',0,-10,10,0)", "a930_a1200_shopping*p_930a_1200a+\\ p300_p530_shopping*p_300p_530p+\\ p530_p730_shopping*p_530p_730p+\\ p730_p1000_shopping*p_730p_1000p+\\ p1000_a700_shopping*p_1000p_700a #V for other V_other=cons_other+\\ work_tour_dummy_O*1*(tour_type==1)+\\", "#V for education V_edu = cons_edu+\\ work_tour_dummy_E*1*(tour_type==1)+\\ edu_tour_dummy_E*1*(tour_type==2)+\\ shopping_tour_dummy_E*1*(tour_type==3)+\\ other_tour_dummy_E*1*(tour_type==4)+\\", "shopping_tour_dummy_S=Beta('shopping tour dummy in shopping',0,-10,10,1) other_tour_dummy_S=Beta('other tour dummy in shopping',0,-10,10,0)", "tour_distance_work*log(1+distance)+\\ a700_a930_work*p_700a_930a+\\ a930_a1200_work*p_930a_1200a+\\ p300_p530_work*p_300p_530p+\\ p530_p730_work*p_530p_730p+\\ p730_p1000_work*p_730p_1000p+\\ p1000_a700_work*p_1000p_700a #V for education", "shopping_tour_dummy_O=Beta('shopping tour dummy in other',0,-10,10,0) other_tour_dummy_O=Beta('other tour dummy in other',0,-10,10,1)", "in shopping',0,-10,10,0) student_dummy_S=Beta('student dummy in shopping',0,-10,10,1) worker_dummy_S=Beta('worker dummy in shopping',0,-10,10,0)", "Sum(log(prob),'obsIter') exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE = exclude nullLoglikelihood(av,'obsIter')", "p730_p1000_shopping=Beta('period 7:30pm to 10pm in shopping',0,-10,10,0) p1000_a700_shopping=Beta('period 10pm to 7am", "tour distance in shopping',0,-10,10,0) tour_distance_other=Beta('log tour distance in other',0,-10,10,0) 
a700_a930_work=", "second_stop_outbound= Beta('dummy for second stop of outbound half tour',0,-10,10,0) threeplus_stop_outbound=Beta('dummy", "dummy in other',0,-10,10,0) passenger_dummy_O=Beta('passenger dummy in other',0,-10,10,0) public_dummy_O=Beta('PT dummy in", "shopping',0,-10,10,1) edu_tour_dummy_S=Beta('edu tour dummy in shopping',0,-10,10,1) shopping_tour_dummy_S=Beta('shopping tour dummy in", "nested import * #import random cons_work= Beta('cons for work', 0,-10,10,0)", "p1000_a700_shopping*p_1000p_700a #V for other V_other=cons_other+\\ work_tour_dummy_O*1*(tour_type==1)+\\ edu_tour_dummy_O*1*(tour_type==2)+\\ shopping_tour_dummy_O*1*(tour_type==3)+\\ other_tour_dummy_O*1*(tour_type==4)+\\ female_dummy_O*female_dummy+\\", "to 12pm in edu',0,-10,10,0) p300_p530_edu=Beta('period 3pm to 5:30pm in edu',0,-10,10,0)", "work_tour_dummy_E=Beta('work tour dummy in edu',0,-10,10,1) edu_tour_dummy_E=Beta('edu tour dummy in edu',0,-10,10,1)", "shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30 pm in shopping',0,-10,10,0) p730_p1000_shopping=Beta('period 7:30pm", "female_dummy_E*female_dummy+\\ student_dummy_E*student_dummy+\\ worker_dummy_E*worker_dummy+\\ driver_dummy_E*driver_dummy+\\ passenger_dummy_E*passenger_dummy+\\ public_dummy_E*public_dummy+\\ edu_logsum * edulogsum+\\ time_window_edu*time_window_h+\\", "p530_p730_work*p_530p_730p+\\ p730_p1000_work*p_730p_1000p+\\ p1000_a700_work*p_1000p_700a #V for education V_edu = cons_edu+\\ work_tour_dummy_E*1*(tour_type==1)+\\", "p530_p730_other*p_530p_730p+\\ p730_p1000_other*p_730p_1000p+\\ p1000_a700_other*p_1000p_700a #V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\\ second_stop_inbound*second_stop*first_bound+\\ threeplus_stop_inbound*three_plus_stop*first_bound+\\", "work',0,-10,10,0) p730_p1000_work=Beta('period 7:30pm to 10pm in work',0,-10,10,0) p1000_a700_work=Beta('period 10pm to", "dummy in edu',0,-10,10,0) passenger_dummy_E=Beta('passenger dummy in edu',0,-10,10,0) public_dummy_E=Beta('PT dummy in", "edu_tour_dummy_S*1*(tour_type==2)+\\ shopping_tour_dummy_S*1*(tour_type==3)+\\ other_tour_dummy_S*1*(tour_type==4)+\\ female_dummy_S*female_dummy+\\ student_dummy_S*student_dummy+\\ worker_dummy_S*worker_dummy+\\ driver_dummy_S*driver_dummy+\\ passenger_dummy_S*passenger_dummy+\\ public_dummy_S*public_dummy+\\ shop_logsum", "first stop of outbound half tour', 0,-10,10,0) second_stop_outbound= Beta('dummy for", "time_window_other*time_window_h+\\ tour_distance_other*log(1+distance)+\\ a700_a930_other*p_700a_930a+\\ a930_a1200_other*p_930a_1200a+\\ p300_p530_other*p_300p_530p+\\ p530_p730_other*p_530p_730p+\\ p730_p1000_other*p_730p_1000p+\\ p1000_a700_other*p_1000p_700a #V for", "shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy in shopping',0,-10,10,0) public_dummy_S=Beta('PT dummy in shopping',0,-10,10,0) work_tour_dummy_O=Beta('work", "random cons_work= Beta('cons for work', 0,-10,10,0) cons_edu = Beta('cons for", "from headers import * from loglikelihood import * from statistics", "distance in other',0,-10,10,0) a700_a930_work= Beta('period 7am to 9:30am in work',0,-10,10,0)", "V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\\ second_stop_inbound*second_stop*first_bound+\\ threeplus_stop_inbound*three_plus_stop*first_bound+\\ first_stop_outbound*first_stop*second_bound+\\ second_stop_outbound*second_stop*second_bound+\\ threeplus_stop_outbound*three_plus_stop*second_bound+\\ work_tour_dummy_Q*1*(tour_type==1)+\\ edu_tour_dummy_Q*1*(tour_type==2)+\\ 
shopping_tour_dummy_Q*1*(tour_type==3)+\\", "shopping_tour_dummy_Q*1*(tour_type==3)+\\ other_tour_dummy_Q*1*(tour_type==4)+\\ first_tour_dummy_Q*first_tour_dummy+\\ sub_tour_dummy_Q*has_subtour+zero_tour_remain_Q*1*(tour_remain==0)+\\ one_tour_remain_Q*1*(tour_remain==1)+twoplus_tour_remain_Q*1*(tour_remain>=2) V = {0:V_quit,1: V_work,2:V_edu,3:V_shopping,4:V_other} av=", "a700_a930_work= Beta('period 7am to 9:30am in work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to", "dummy in edu',0,-10,10,1) female_dummy_E=Beta('female dummy in edu',0,-10,10,0) student_dummy_E=Beta('student dummy in", "in work',0,-10,10,1) time_window_edu= Beta('time available in edu',0,-10,10,1) time_window_shopping= Beta('time available", "a700_a930_other= Beta('period 7am to 9:30am in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am to", "in other',0,-10,10,0) p730_p1000_other=Beta('period 7:30pm to 10pm in other',0,-10,10,0) p1000_a700_other=Beta('period 10pm", "edu',0,-10,10,1) edu_tour_dummy_E=Beta('edu tour dummy in edu',0,-10,10,1) shopping_tour_dummy_E=Beta('shopping tour dummy in", "* #import random cons_work= Beta('cons for work', 0,-10,10,0) cons_edu =", "time_window_other= Beta('time available in other',0,-10,10,1) tour_distance_work= Beta('log tour distance in", "from nested import * #import random cons_work= Beta('cons for work',", "p300_p530_edu=Beta('period 3pm to 5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to 7:30", "dummy',0,-10,10,0) twoplus_tour_remain_Q=Beta('2+ tour remain dummy',0,-10,10,1) work_tour_dummy_W=Beta('work tour dummy in work',0,-10,10,1)", "dummy in shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy in shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy in", "student_dummy_W=Beta('student dummy in work',0,-10,10,1) worker_dummy_W=Beta('worker dummy in work',0,-10,10,1) driver_dummy_W=Beta('driver dummy", "available in work',0,-10,10,1) time_window_edu= Beta('time available in edu',0,-10,10,1) time_window_shopping= Beta('time", "available in edu',0,-10,10,1) time_window_shopping= Beta('time available in shopping',0,-10,10,1) time_window_other= Beta('time", "female_dummy_S=Beta('female dummy in shopping',0,-10,10,0) student_dummy_S=Beta('student dummy in shopping',0,-10,10,1) worker_dummy_S=Beta('worker dummy", "shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy in shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy in shopping',0,-10,10,0) public_dummy_S=Beta('PT", "p300_p530_work=Beta('period 3pm to 5:30pm in work',0,-10,10,0) p530_p730_work=Beta('period 5:30pm to 7:30", "3pm to 5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to 7:30 pm", "shopping',0,-10,10,0) a700_a930_other= Beta('period 7am to 9:30am in other',0,-10,10,0) a930_a1200_other=Beta('period 9:30am", "p730_p1000_other*p_730p_1000p+\\ p1000_a700_other*p_1000p_700a #V for quit V_quit= cons_Q+first_stop_inbound*first_stop*first_bound+\\ second_stop_inbound*second_stop*first_bound+\\ threeplus_stop_inbound*three_plus_stop*first_bound+\\ first_stop_outbound*first_stop*second_bound+\\", "p300_p530_shopping=Beta('period 3pm to 5:30pm in shopping',0,-10,10,0) p530_p730_shopping=Beta('period 5:30pm to 7:30", "quit',1,0,100,1) MU2 = Beta('MU for non-quit', 1.0,0,100,1) #V for work", "MU2 = Beta('MU for non-quit', 1.0,0,100,1) #V for work V_work=", "tour_distance_other*log(1+distance)+\\ a700_a930_other*p_700a_930a+\\ a930_a1200_other*p_930a_1200a+\\ p300_p530_other*p_300p_530p+\\ p530_p730_other*p_530p_730p+\\ p730_p1000_other*p_730p_1000p+\\ 
p1000_a700_other*p_1000p_700a #V for quit", "time_window_work=Beta('time available in work',0,-10,10,1) time_window_edu= Beta('time available in edu',0,-10,10,1) time_window_shopping=", "in other',0,-10,10,0) work_logsum=Beta('work logsum in work',0,-10,10,1) edu_logsum=Beta('edu logsum in edu',0,-10,10,1)", "cons_edu = Beta('cons for education',0,-50,10,0) cons_shopping = Beta('cons for shopping',0,-10,10,0)", "import * from nested import * #import random cons_work= Beta('cons", "to 10pm in other',0,-10,10,0) p1000_a700_other=Beta('period 10pm to 7am in other',0,-10,10,0)", "Beta('cons for shopping',0,-10,10,0) cons_other = Beta('cons for other',0,-10,10,0) cons_Q =", "worker_dummy_S=Beta('worker dummy in shopping',0,-10,10,0) driver_dummy_S=Beta('driver dummy in shopping',0,-10,10,0) passenger_dummy_S=Beta('passenger dummy", "from loglikelihood import * from statistics import * from nested", "edu',0,-10,10,0) p300_p530_edu=Beta('period 3pm to 5:30pm in edu',0,-10,10,0) p530_p730_edu=Beta('period 5:30pm to", "other',0,-10,10,0) MU1 = Beta('MU for quit',1,0,100,1) MU2 = Beta('MU for", "V_work,2:V_edu,3:V_shopping,4:V_other} av= {0:avail_quit,1:avail_workstop,2:avail_edustop,3:avail_shopstop,4:avail_otherstop} nest_quit = MU1 , [0] nest_nonquit =", "7:30 pm in work',0,-10,10,0) p730_p1000_work=Beta('period 7:30pm to 10pm in work',0,-10,10,0)", "= Sum(log(prob),'obsIter') exclude = ((avail_violation==1)+(origin_mtz==0)+(destination_mtz==0)+(time_window_h>=10)) > 0 BIOGEME_OBJECT.EXCLUDE = exclude", "other_tour_dummy_E=Beta('other tour dummy in edu',0,-10,10,1) female_dummy_E=Beta('female dummy in edu',0,-10,10,0) student_dummy_E=Beta('student", "* shoplogsum+\\ time_window_shopping*time_window_h+\\ tour_distance_shopping*log(1+distance)+\\ a700_a930_shopping*p_700a_930a+\\ a930_a1200_shopping*p_930a_1200a+\\ p300_p530_shopping*p_300p_530p+\\ p530_p730_shopping*p_530p_730p+\\ p730_p1000_shopping*p_730p_1000p+\\ p1000_a700_shopping*p_1000p_700a", "7am to 9:30am in edu',0,-10,10,0) a930_a1200_edu=Beta('period 9:30am to 12pm in", "Beta('period 7am to 9:30am in work',0,-10,10,0) a930_a1200_work=Beta('period 9:30am to 12pm", "in edu',0,-10,10,0) passenger_dummy_E=Beta('passenger dummy in edu',0,-10,10,0) public_dummy_E=Beta('PT dummy in edu',0,-10,10,0)" ]
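The script groups the five alternatives into a two-level nest: quit sits alone under MU1, and the four stop purposes share MU2, so their unobserved utilities are correlated. As a sanity check of what the `nested()` call computes, here is a minimal standalone sketch of a two-level nested logit probability in plain numpy; the utility values and scale parameters are illustrative, not estimates from this model.

import numpy as np

# Hypothetical utilities for {quit, work, edu, shopping, other}
V = np.array([0.0, -1.2, -0.8, -1.5, -2.0])
mus = {"quit": 1.0, "nonquit": 1.4}          # nest scales (MU1, MU2); illustrative
nests = {"quit": [0], "nonquit": [1, 2, 3, 4]}

# Inclusive value (logsum) of each nest: (1/mu) * ln(sum exp(mu * V_i))
logsums = {name: np.log(np.sum(np.exp(mus[name] * V[idx]))) / mus[name]
           for name, idx in nests.items()}

# Upper level chooses a nest from its logsum; lower level chooses within it
denom = sum(np.exp(ls) for ls in logsums.values())
P = np.zeros_like(V)
for name, idx in nests.items():
    p_nest = np.exp(logsums[name]) / denom
    within = np.exp(mus[name] * V[idx]) / np.sum(np.exp(mus[name] * V[idx]))
    P[idx] = p_nest * within

print(P, P.sum())  # the five probabilities sum to 1

When MU2 equals 1 the within-nest correlation vanishes and the model collapses to a plain multinomial logit, which is why the script keeps the commented-out `bioLogit` call as a fallback.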
[ "Flask from flask_sqlalchemy import SQLAlchemy # 数据库操作对象 db = SQLAlchemy()", "SQLAlchemy() # 创建app def create_app(): # flask操作对象 app = Flask(__name__)", "app.view import attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department) app.register_blueprint(salary) app.register_blueprint(attendance)", "# 创建app def create_app(): # flask操作对象 app = Flask(__name__) #", "= Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 db.init_app(app) # 员工管理子系统", "def create_app(): # flask操作对象 app = Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config)", "# 初始化模块 from config import Config from flask import Flask", "# 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 db.init_app(app) # 员工管理子系统 from app.view", "= SQLAlchemy() # 创建app def create_app(): # flask操作对象 app =", "create_app(): # flask操作对象 app = Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) #", "from flask_sqlalchemy import SQLAlchemy # 数据库操作对象 db = SQLAlchemy() #", "Config from flask import Flask from flask_sqlalchemy import SQLAlchemy #", "app.config.from_object(Config) # 初始化数据库 db.init_app(app) # 员工管理子系统 from app.view import employee", "初始化数据库 db.init_app(app) # 员工管理子系统 from app.view import employee # 职位管理子系统", "from app.view import post # 部门管理子系统 from app.view import department", "import salary # 考勤管理子系统 from app.view import attendance # 统一对外接口蓝本", "工资管理子系统 from app.view import salary # 考勤管理子系统 from app.view import", "salary # 考勤管理子系统 from app.view import attendance # 统一对外接口蓝本 app.register_blueprint(employee)", "import employee # 职位管理子系统 from app.view import post # 部门管理子系统", "employee # 职位管理子系统 from app.view import post # 部门管理子系统 from", "post # 部门管理子系统 from app.view import department # 工资管理子系统 from", "from app.view import attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department) app.register_blueprint(salary)", "初始化模块 from config import Config from flask import Flask from", "config import Config from flask import Flask from flask_sqlalchemy import", "app.view import employee # 职位管理子系统 from app.view import post #", "员工管理子系统 from app.view import employee # 职位管理子系统 from app.view import", "SQLAlchemy # 数据库操作对象 db = SQLAlchemy() # 创建app def create_app():", "flask_sqlalchemy import SQLAlchemy # 数据库操作对象 db = SQLAlchemy() # 创建app", "部门管理子系统 from app.view import department # 工资管理子系统 from app.view import", "attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department) app.register_blueprint(salary) app.register_blueprint(attendance) return app", "import Config from flask import Flask from flask_sqlalchemy import SQLAlchemy", "import attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department) app.register_blueprint(salary) app.register_blueprint(attendance) return", "# 部门管理子系统 from app.view import department # 工资管理子系统 from app.view", "Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 db.init_app(app) # 员工管理子系统 from", "from flask import Flask from flask_sqlalchemy import SQLAlchemy # 数据库操作对象", "考勤管理子系统 from app.view import attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post) app.register_blueprint(department)", "import post # 部门管理子系统 from app.view import department # 工资管理子系统", "# 数据库操作对象 db = SQLAlchemy() # 创建app def create_app(): #", "app = Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 
db.init_app(app) #", "# 初始化数据库 db.init_app(app) # 员工管理子系统 from app.view import employee #", "# 考勤管理子系统 from app.view import attendance # 统一对外接口蓝本 app.register_blueprint(employee) app.register_blueprint(post)", "职位管理子系统 from app.view import post # 部门管理子系统 from app.view import", "import department # 工资管理子系统 from app.view import salary # 考勤管理子系统", "db = SQLAlchemy() # 创建app def create_app(): # flask操作对象 app", "db.init_app(app) # 员工管理子系统 from app.view import employee # 职位管理子系统 from", "通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 db.init_app(app) # 员工管理子系统 from app.view import", "import SQLAlchemy # 数据库操作对象 db = SQLAlchemy() # 创建app def", "from app.view import employee # 职位管理子系统 from app.view import post", "app.view import post # 部门管理子系统 from app.view import department #", "app.view import department # 工资管理子系统 from app.view import salary #", "department # 工资管理子系统 from app.view import salary # 考勤管理子系统 from", "flask操作对象 app = Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库 db.init_app(app)", "from config import Config from flask import Flask from flask_sqlalchemy", "flask import Flask from flask_sqlalchemy import SQLAlchemy # 数据库操作对象 db", "数据库操作对象 db = SQLAlchemy() # 创建app def create_app(): # flask操作对象", "创建app def create_app(): # flask操作对象 app = Flask(__name__) # 通过配置文件读取并应用配置", "from app.view import department # 工资管理子系统 from app.view import salary", "app.view import salary # 考勤管理子系统 from app.view import attendance #", "# flask操作对象 app = Flask(__name__) # 通过配置文件读取并应用配置 app.config.from_object(Config) # 初始化数据库", "# 工资管理子系统 from app.view import salary # 考勤管理子系统 from app.view", "import Flask from flask_sqlalchemy import SQLAlchemy # 数据库操作对象 db =", "from app.view import salary # 考勤管理子系统 from app.view import attendance", "# 员工管理子系统 from app.view import employee # 职位管理子系统 from app.view", "# 职位管理子系统 from app.view import post # 部门管理子系统 from app.view" ]
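Assuming the factory lives in `app/__init__.py` (consistent with the `from app.view import ...` lines), a minimal entry point might look like the sketch below; the file name `run.py`, host, and port are illustrative.

# run.py -- illustrative entry point for the application factory above
from app import create_app

app = create_app()

if __name__ == "__main__":
    # Development server only; use a WSGI server (e.g. gunicorn) in production
    app.run(host="127.0.0.1", port=5000, debug=True)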
[ "__open_abi(self): return json.load(open(self._abi_path, \"r\")) @property def abi(self): return self.__json_abi @property", "self._contract_address @property def eth(self): return self._contract.web3.eth @property def w3(self): return", "EthereumConnection(): def __init__(self, url_node): self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node)", "ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn = eth_conn self._contract_address", "web3.Web3(self._node_provider) @property def w3(self): return self._w3 @property def url_node(self): return", "def contract(self): return self._contract @property def address(self): return self._contract_address @property", "@property def eth(self): return self._contract.web3.eth @property def w3(self): return self._eth_conn.w3", "def w3(self): return self._w3 @property def url_node(self): return self._url_node class", "return self._contract_address @property def eth(self): return self._contract.web3.eth @property def w3(self):", "self.__json_abi @property def contract(self): return self._contract @property def address(self): return", "def address(self): return self._contract_address @property def eth(self): return self._contract.web3.eth @property", "self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return", "web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def w3(self): return self._w3 @property", "abi(self): return self.__json_abi @property def contract(self): return self._contract @property def", "= eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi =", "<filename>listener/src/ethereum_connection.py<gh_stars>0 import json import web3 class EthereumConnection(): def __init__(self, url_node):", "= web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def w3(self): return self._w3", "self._w3 @property def url_node(self): return self._url_node class ContractConnection(): def __init__(self,", "self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi = self.__open_abi() self._contract", "= self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi = self.__open_abi() self._contract =", "return self._contract @property def address(self): return self._contract_address @property def eth(self):", "abi_path self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi )", "self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path, \"r\")) @property", "import web3 class EthereumConnection(): def __init__(self, url_node): self._url_node = url_node", "w3(self): return self._w3 @property def url_node(self): return self._url_node class ContractConnection():", "import json import web3 class EthereumConnection(): def __init__(self, url_node): self._url_node", "def __init__(self, url_node): self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3", "eth_conn, contract_address, abi_path): self._eth_conn = eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path", ") def __open_abi(self): return json.load(open(self._abi_path, 
\"r\")) @property def abi(self): return", "\"r\")) @property def abi(self): return self.__json_abi @property def contract(self): return", "@property def address(self): return self._contract_address @property def eth(self): return self._contract.web3.eth", "self._url_node class ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn =", "eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi = self.__open_abi()", "url_node(self): return self._url_node class ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path):", "@property def w3(self): return self._w3 @property def url_node(self): return self._url_node", "@property def url_node(self): return self._url_node class ContractConnection(): def __init__(self, eth_conn,", "def abi(self): return self.__json_abi @property def contract(self): return self._contract @property", "self._contract @property def address(self): return self._contract_address @property def eth(self): return", "return self.__json_abi @property def contract(self): return self._contract @property def address(self):", "self._w3 = web3.Web3(self._node_provider) @property def w3(self): return self._w3 @property def", "abi_path): self._eth_conn = eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path", "def url_node(self): return self._url_node class ContractConnection(): def __init__(self, eth_conn, contract_address,", "contract(self): return self._contract @property def address(self): return self._contract_address @property def", "= self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path, \"r\"))", "self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path,", "= self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def __open_abi(self):", "@property def contract(self): return self._contract @property def address(self): return self._contract_address", "json import web3 class EthereumConnection(): def __init__(self, url_node): self._url_node =", "= url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def", "self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def w3(self): return", "= abi_path self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi", "self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract(", "address=self._contract_address, abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path, \"r\")) @property def", "url_node): self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider)", "self._abi_path = abi_path self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address,", "abi=self.__json_abi ) def __open_abi(self): return json.load(open(self._abi_path, \"r\")) @property def abi(self):", "__init__(self, eth_conn, contract_address, abi_path): self._eth_conn = eth_conn 
self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address)", "web3 class EthereumConnection(): def __init__(self, url_node): self._url_node = url_node self._node_provider", "contract_address, abi_path): self._eth_conn = eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path =", "def __open_abi(self): return json.load(open(self._abi_path, \"r\")) @property def abi(self): return self.__json_abi", "class ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn = eth_conn", "return self._url_node class ContractConnection(): def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn", "self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property", "self.__json_abi = self.__open_abi() self._contract = self._eth_conn.w3.eth.contract( address=self._contract_address, abi=self.__json_abi ) def", "return json.load(open(self._abi_path, \"r\")) @property def abi(self): return self.__json_abi @property def", "@property def abi(self): return self.__json_abi @property def contract(self): return self._contract", "__init__(self, url_node): self._url_node = url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 =", "def __init__(self, eth_conn, contract_address, abi_path): self._eth_conn = eth_conn self._contract_address =", "return self._w3 @property def url_node(self): return self._url_node class ContractConnection(): def", "class EthereumConnection(): def __init__(self, url_node): self._url_node = url_node self._node_provider =", "= web3.Web3(self._node_provider) @property def w3(self): return self._w3 @property def url_node(self):", "json.load(open(self._abi_path, \"r\")) @property def abi(self): return self.__json_abi @property def contract(self):", "address(self): return self._contract_address @property def eth(self): return self._contract.web3.eth @property def", "self._eth_conn = eth_conn self._contract_address = self._eth_conn.w3.toChecksumAddress(contract_address) self._abi_path = abi_path self.__json_abi", "url_node self._node_provider = web3.HTTPProvider(self._url_node) self._w3 = web3.Web3(self._node_provider) @property def w3(self):" ]
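Typical wiring of the two classes might look like the sketch below. The node URL, contract address, and ABI path are placeholders; the camelCase calls (`toChecksumAddress`, `getBlock`) match the pre-v6 web3.py API that the module itself uses.

# Illustrative usage; all concrete values are placeholders
eth_conn = EthereumConnection("http://localhost:8545")
contract = ContractConnection(
    eth_conn,
    "0x0000000000000000000000000000000000000000",  # placeholder address
    "abi/contract_abi.json",                       # placeholder ABI path
)

print(contract.address)                       # checksummed address
latest = contract.w3.eth.getBlock("latest")   # provider shared via eth_conn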
[ "axis=0) p3 = np.percentile(self.yout[..., ndof * node + 1], 50", "def plot_polar_bode( self, probe, percentile=[], conf_interval=[], fig=None, units=\"mic-pk-pk\", **kwargs, ):", "= np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 =", "The figure object with the plot. \"\"\" ndof = self.number_dof", "on the rotor system in a 3D view. Parameters ----------", "title_text=\"<b>Rotor speed</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True,", "= \"<b>Amplitude (dB)</b>\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if", "1, 2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig0.layout.xaxis, row=1,", "response given a set of probes. Parameters ---------- dof :", "np.cos(angle) ** 2 + _probe_resp[1] * np.sin(angle) ** 2 )", "force_resp : array Array with the force response for each", "radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ),", "fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\",", "= frequency_range self.number_dof = number_dof def plot_magnitude( self, probe, percentile=[],", "node], 50 + p / 2, axis=0) p2 = np.percentile(self.yout[...,", "Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with diagrams", "go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), mode=\"lines\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5,", "in fig2[\"data\"]: fig.add_trace(data, row=1, col=2) fig.update_xaxes(fig0.layout.xaxis, row=1, col=1) fig.update_yaxes(fig0.layout.yaxis, row=1,", "the rotor system in a 3D view. 
Parameters ---------- percentile", "in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(self.magnitude, p, axis=1), theta=np.percentile(self.phase, p, axis=1),", "yout self.xout = xout self.nodes_list = nodes_list self.nodes_pos = nodes_pos", "__init__(self, forced_resp, magnitude, phase, frequency_range, number_dof): self.forced_resp = forced_resp self.magnitude", "p / 2, axis=1) p2 = np.percentile(self.phase, 50 - p", "borderwidth=2, ), ) return fig def plot(self, percentile=[], conf_interval=[], *args,", "magnitude, phase, frequency_range, number_dof): self.forced_resp = forced_resp self.magnitude = magnitude", "\"\"\" # fmt: off fig0 = self.plot_magnitude(probe, percentile, conf_interval, units=units,", "hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\",", "= speed_range self.magnitude = magnitude self.phase = phase def plot_magnitude(", "fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j], axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j", "self.nodes_pos[n] fig.add_trace( go.Scatter3d( x=x, y=np.mean(self.yout[..., ndof * n], axis=0), z=np.mean(self.yout[...,", "@ np.vstack((y[:, dofx], y[:, dofy])) probe_resp[j] = ( _probe_resp[0] *", "showspikes=False), zaxis=dict(title=dict(text=\"<b>Amplitude - Y</b>\"), showspikes=False), ), ) return fig class", "units == \"m\": r_axis_label = \"<b>Amplitude (m)</b>\" elif units ==", "x=x, y=np.concatenate((p1, p2[::-1])), mode=\"lines\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5, name=f\"Probe", ") return fig def plot( self, probe, percentile=[], conf_interval=[], fig=None,", "phase def plot_magnitude( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot", "+ \"Frequency: %{y:.3f}\"), **kwargs, ) ) for j in range(self.wd.shape[0]):", "%{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) color_p += 1 x = np.concatenate((self.frequency_range,", "frequencies, phase of the frequency response for each pair input/output.", "legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes(", "= 0 for i, p in enumerate(probe): dofx = p[0]", "provide plots for Time Response and Orbit Response. Parameters ----------", "import make_subplots from ross.plotly_theme import tableau_colors pio.renderers.default = \"browser\" #", "xaxis=dict(title=dict(text=\"<b>Rotor Length</b>\"), showspikes=False), yaxis=dict(title=dict(text=\"<b>Amplitude - X</b>\"), showspikes=False), zaxis=dict(title=dict(text=\"<b>Amplitude - Y</b>\"),", "None: fig = go.Figure() color_i = 0 color_p = 0", "* n + 1], 50 + p / 2, axis=0", "**kwargs, ) ) x = np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p", "the state vector. nodes_list: array list with nodes from a", "confidence interval: {p}%\", legendgroup=f\"Probe {i + 1} - confidence interval:", "speed range in rad/s. 
wd : array Array with the", "y=self.speed_range * h, opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j),", "v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0,", ") x = np.concatenate((self.time_range, self.time_range[::-1])) for j, p in enumerate(conf_interval):", "- p / 2, axis=0) fig.add_trace( go.Scatter( x=np.concatenate((p1, p2[::-1])), y=np.concatenate((p3,", "else: r_axis_label = \"<b>Amplitude (dB)</b>\" for k, v in default_values.items():", "\"\"\"Plot orbit response (2D). This function plots orbits for a", "The figure object with the plot. \"\"\" conf_interval = np.sort(conf_interval)", "1], 50 + p / 2, axis=0) p4 = np.percentile(self.yout[...,", "data in fig2[\"data\"]: subplots.add_trace(data, row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis,", "name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hoverinfo=\"none\", **kwargs, ) ) for i,", "j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50 + p", ") # fmt: on fig.add_trace( go.Scatter( x=self.time_range, y=np.mean(probe_resp, axis=0), opacity=1.0,", "Number of degrees of freedom per shaft element's node Returns", "2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p),", "go.Figure() color_i = 0 color_p = 0 for i, p", "/ 2, axis=1) p2 = np.percentile(self.wd[j], 50 - p /", "+ _probe_resp[1] * np.sin(angle) ** 2 ) # fmt: on", "off operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]]", "the plot. \"\"\" def __init__(self, time_range, yout, xout, number_dof, nodes_list,", "col=1) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=2) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2) subplots.update_layout( plot_bgcolor=\"white\", width=1800,", "**kwargs) subplots = make_subplots( rows=2, cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\":", ") ) fig.update_xaxes(title_text=\"<b>Time (s)</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") return fig def plot_2d(self, node,", "provide plots for Forced Response. Parameters ---------- force_resp : array", "p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p),", "system in a 3D view. Parameters ---------- percentile : list,", "frequency_range : array Array with the frequencies. magnitude : array", "x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1), fill=\"toself\", fillcolor=colors1[j], opacity=0.5, name=f\"Probe {i +", "interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=True if j == 0 else False,", "50 - p / 2, axis=1) p3 = np.percentile(self.phase, 50", "{p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) color_p += 1 x", "legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\" ), **kwargs,", "- percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) color_p +=", "frequency. 
\"\"\" def __init__(self, speed_range, magnitude, phase): self.speed_range = speed_range", "linecolor=\"black\", mirror=True, ) fig.update_layout( width=1200, height=900, plot_bgcolor=\"white\", legend=dict( font=dict(family=\"sans-serif\", size=14),", "for Forced Response. Parameters ---------- force_resp : array Array with", "0]) for j, mag in enumerate(self.magnitude): _probe_resp = operator @", "in enumerate(self.yout): _probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy]))", "hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Log Dec: %{y:.3f}\"), **kwargs, ) ) for", "= np.sort(conf_interval) percentile = np.sort(percentile) if fig is None: fig", "fig1[\"data\"]: subplots.add_trace(data, 1, 2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=1)", "np.percentile(self.phase, 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x,", "vs frequency. Parameters ---------- percentile : list, optional Sequence of", "Two options for plooting are available: Matplotlib and Bokeh. The", "int indicate the node where the probe is located. orientation", "= magnitude self.phase = phase self.frequency_range = frequency_range self.number_dof =", "plots for Campbell Diagram. It's possible to visualize multiples harmonics", "x=self.frequency_range, y=np.percentile(probe_phase, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i", "tickfont=dict(size=14), gridcolor=\"lightgray\", linecolor=\"black\", linewidth=2.5, ), ), ) return fig def", "np.percentile(self.phase, 50 + p / 2, axis=1) p4 = np.percentile(self.phase,", "with amplitude vs frequency phase angle vs frequency. \"\"\" fig0", "and 100 inclusive. kwargs : optional Additional key word arguments", "name=\"confidence interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=True if j == 0 else", "{i + 1} - percentile: {p}%\", legendgroup=f\"Probe {i + 1}", "\"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), **kwargs, ) ) for", "{}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency:", "units : str, optional Unit system Default is \"mic-pk-pk\" kwargs", "frequency response for node for each frequency. phase : array", "bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return fig def plot_polar_bode( self,", "2, axis=1) p3 = np.percentile(self.phase, 50 + p / 2,", ") fig.update_layout( plot_bgcolor=\"white\", width=1200, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\",", "p2 = np.percentile(probe_resp, 50 - p / 2, axis=0) p3", "color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) )", "linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Damped Natural Frequencies</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16),", "[{}, None]] ) for data in fig0[\"data\"]: subplots.add_trace(data, row=1, col=1)", "line=dict(width=3.0, color=list(tableau_colors)[i]), name=f\"Probe {i + 1} - Mean\", legendgroup=f\"Probe {i", "forced_resp, magnitude, phase, frequency_range, number_dof): self.forced_resp = forced_resp self.magnitude =", "polar forced response using Plotly. 
Parameters ---------- dof : int", "\"Amplitude: %{y:.2e}\"), **kwargs, ) ) for j, p in enumerate(percentile):", "color_p = 0 color_i = 0 for i, p in", "Plotly graph_objects.make_subplots() Plotly figure with diagrams for frequency and log", "**kwargs, ) ) fig.update_layout( polar=dict( radialaxis=dict( title_text=r_axis_label, title_font=dict(family=\"Arial\", size=14), gridcolor=\"lightgray\",", "), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=False, hovertemplate=(", "element's node. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure", "color=colors2[j]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency:", "name=f\"Probe {i + 1} - percentile: {p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" +", "\"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), )", "p / 2, axis=0) p4 = np.percentile(self.yout[..., ndof * node", "---------- speed_range : array Array with the speed range in", "interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "palette of colors colors1 = px.colors.qualitative.Dark24 colors2 = px.colors.qualitative.Light24 class", "Bokeh. The user chooses between them using the attribute plot_type.", "- confidence interval: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) color_i", "plotly import io as pio from plotly.subplots import make_subplots from", "p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(self.magnitude, p, axis=1), theta=np.percentile(self.phase, p,", "\"\"\"STOCHASTIC ROSS plotting module. This module returns graphs for each", "np.percentile(self.yout[..., ndof * node], 50 + p / 2, axis=0)", "p1 = np.percentile(self.log_dec[j], 50 + p / 2, axis=1) p2", "percentile=[], conf_interval=[], fig=None, *args, **kwargs ): \"\"\"Plot time response. This", "fig.update_layout( polar=dict( radialaxis=dict(title_text=r_axis_label, exponentformat=\"E\"), angularaxis=dict(exponentformat=\"E\"), ), **kwargs, ) return fig", "kwargs.setdefault(k, v) fig = go.Figure() x = np.concatenate((self.speed_range, self.speed_range[::-1])) for", "default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs) subplots", "= self.plot_magnitude(percentile, conf_interval, units, **kwargs) default_values = dict(showlegend=False) for k,", "colors2 = px.colors.qualitative.Light24 class ST_CampbellResults: \"\"\"Store stochastic results and provide", "opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" +", "**kwargs, ) return fig def plot( self, probe, percentile=[], conf_interval=[],", "in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j], axis=1), opacity=1.0, name=\"Mean -", "borderwidth=2, ), ) return subplots class ST_TimeResponseResults: \"\"\"Store stochastic results", "np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_phase,", "The figure object with the plot. 
\"\"\" if units ==", "2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[j]), fill=\"toself\",", "figure with diagrams for frequency and log dec. \"\"\" def", "fig.add_trace( go.Scatter( x=np.concatenate((p1, p2[::-1])), y=np.concatenate((p3, p4[::-1])), line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i],", "self.frequency_range = frequency_range self.number_dof = number_dof def plot_magnitude( self, probe,", "- p / 2, axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])),", "Python Figure Reference for more information. Returns ------- fig :", "angle vs frequency. \"\"\" # fmt: off fig0 = self.plot_magnitude(probe,", "confidence interval: {p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "\"\"\" if fig is None: fig = go.Figure() default_values =", "color=colors1[i]), legendgroup=\"perc{}\".format(p), showlegend=True if j == 0 else False, hovertemplate=(", "- percentile: {p}%\", legendgroup=f\"Probe {i + 1} - percentile{p}\", hovertemplate=(", "axis=1) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[j]), fill=\"toself\", fillcolor=colors1[j],", "= nodes_pos self.number_dof = number_dof def plot_1d( self, probe, percentile=[],", "name=\"Mean\", line=dict(width=3, color=\"black\"), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y", "shaft. The 0 refers to +X direction. percentile : list,", "if fig is None: fig = go.Figure() color_i = 0", "as np from plotly import express as px from plotly", "thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\"", "for node for each frequency. phase : array Phase of", "angle fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_phase, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]),", "phase response given an output and an input. Parameters ----------", "plot. \"\"\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if units", "%{customdata:.2f}</b>\" ), ) ) color_p += 1 for j, p", "conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if units == \"m\":", "args : optional Additional plot axes kwargs : optional Additional", "object with the plot. \"\"\" if units == \"m\": y_axis_label", "**kwargs, ) ) for i, p in enumerate(conf_interval): fig.add_trace( go.Scatter3d(", ") fig.update_yaxes( title_text=y_axis_label, title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\",", "- Mean\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency:", "Mean\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) for j, p in", "go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]), name=\"percentile: {}%\".format(p),", "system. Default is \"mic-pk-pk\" polar_kwargs : optional Additional key word", "Plotly graph_objects.Figure() Bokeh plot axes with magnitude plot. \"\"\" if", ": Plotly graph_objects.Figure() The figure object with the plot. 
\"\"\"", "probe_resp = np.zeros_like(self.yout[:, :, 0]) for j, y in enumerate(self.yout):", "1], axis=0), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), hovertemplate=( \"X - Amplitude:", "Time evolution of the state vector. nodes_list: array list with", "phase response given a set of probes. Parameters ---------- probe", "Plotly. Parameters ---------- probe : list of tuples List with", "legendgroup=f\"Probe {i + 1} - confidence interval: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase:", "phase : array Array with the frequencies, phase of the", "axis=1) p2 = np.percentile(self.phase, 50 - p / 2, axis=1)", "enumerate(probe): probe_phase = np.zeros_like(self.phase[:, :, 0]) for j, phs in", "* node], 50 - p / 2, axis=0) p3 =", "args: optional harmonics : list, optional List with the harmonics", "p4 = np.percentile(self.yout[..., ndof * node + 1], 50 -", "fig.add_trace( go.Scatterpolar( r=np.mean(probe_resp, axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", mode=\"lines\", line=dict(width=3.0,", "the speed range in rad/s. wd : array Array with", "%{customdata:.2f}</b>\" ), **kwargs, ) ) for i, p in enumerate(percentile):", "Frequency Response. Parameters ---------- speed_range : array Array with the", "linewidth=2.5, ), ), ) return fig def plot(self, percentile=[], conf_interval=[],", "\"\"\"Store stochastic results and provide plots for Campbell Diagram. It's", "axial positions. number_dof : int Number of degrees of freedom", "p, axis=0), z=np.percentile(self.yout[..., ndof * n + 1], p, axis=0),", "- confidence interval: {p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs,", "np.sin(angle)) ** 2) # fmt: on probe_phase = np.zeros_like(self.phase[:, :,", "fig=None, *args, **kwargs): \"\"\"Plot orbit response (2D). This function plots", "fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], 50 + p", "with magnitude plot. \"\"\" if units == \"m\": y_axis_label =", "fmt: off operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), +", "= go.Figure() default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile =", ") fig.update_xaxes(title_text=\"<b>Amplitude</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") fig.update_layout(title=\"<b>Rotor Orbit: node {}</b>\".format(node)), return fig def", "\"<b>Amplitude (dB)</b>\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if fig", "with the Logarithmic decrement Returns ------- subplots : Plotly graph_objects.make_subplots()", "axis=1) p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1)", "enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(probe_resp, p, axis=0), theta=np.percentile(probe_phase, p, axis=0), customdata=self.frequency_range,", "/ 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1), fill=\"toself\",", "plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the damped natural frequencies", "x=self.speed_range, y=self.speed_range * h, opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"),", "compute, which must be between 0 and 100 inclusive. harmonics:", "%{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\",", "and provide plots for Campbell Diagram. 
It's possible to visualize multiple harmonics in a single plot to check other speeds that also excite a specific natural frequency.
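As an aside, a minimal sketch (with made-up numbers, independent of this class) of how such harmonic overlays reveal critical speed candidates where a natural frequency crosses an ``h * speed`` line:

>>> import numpy as np
>>> import plotly.graph_objects as go
>>> speed = np.linspace(0, 500, 100)  # speed range, rad/s
>>> wd = 120.0 + 0.05 * speed  # mean damped natural frequency of one mode
>>> fig = go.Figure(go.Scatter(x=speed, y=wd, name="Mean - Mode 1"))
>>> for h in (1, 2):  # harmonics, as in the `harmonics` argument
...     fig = fig.add_trace(go.Scatter(x=speed, y=h * speed, name=f"{h}x speed"))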
Two options for plooting are available: Matplotlib", "= \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": y_axis_label = \"<b>Amplitude", "fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe", "= time_range self.yout = yout self.xout = xout self.nodes_list =", "go.Figure() line = np.zeros(len(self.nodes_pos)) fig.add_trace( go.Scatter3d( x=self.nodes_pos, y=line, z=line, line=dict(width=2.0,", "plot_magnitude( self, probe, percentile=[], conf_interval=[], fig=None, units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot", "in enumerate(self.magnitude): _probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy]))", "response for each pair input/output. Returns ------- subplots : Plotly", "x=self.speed_range, y=np.percentile(self.magnitude, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i),", "go.Scatter( x=self.frequency_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe", "range(self.wd.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.wd[j], axis=1), opacity=1.0, name=\"Mean - Mode", "in fig2[\"data\"]: subplots.add_trace(data, row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1,", "node], axis=0), y=np.mean(self.yout[..., ndof * node + 1], axis=0), opacity=1.0,", "name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=np.concatenate((p1, p2[::-1])),", "in enumerate(conf_interval): p1 = np.percentile(self.magnitude, 50 + p / 2,", "0 else i for i in aux_phase] ) angle =", "1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\" ),", "axis=0), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), hovertemplate=( \"X - Amplitude:", "color=\"royalblue\"), opacity=1.0, ...) *See Plotly Python Figure Reference for more", "axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" +", "+ p / 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence", "method plots the phase response given an output and an", "v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() x =", "* node + 1], 50 + p / 2, axis=0)", "the frequencies, phase of the frequency response for each pair", "- Y</b>\"), showspikes=False), ), ) return fig class ST_ForcedResponseResults: \"\"\"Store", "hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs, ) ) for j", "+ 1} - percentile{p}\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\"", "line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs,", "plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs): \"\"\"Plot orbit response (3D).", "vs frequency phase angle vs frequency. \"\"\" def __init__(self, forced_resp,", "percentile=[], conf_interval=[], *args, **kwargs): \"\"\"Plot Campbell Diagram. 
This method plots", "= np.percentile(self.wd[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter(", "for Frequency Response. Parameters ---------- speed_range : array Array with", "the frequency response for each pair input/output. phase : array", "angle fig.add_trace( go.Scatterpolar( r=np.mean(probe_resp, axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", mode=\"lines\",", "size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=y_axis_label,", "array Array with the speed range in rad/s. magnitude :", "go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[j]), fill=\"toself\", fillcolor=colors1[j], opacity=0.3, name=\"confidence", "np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) for i,", "The figure object with the plot. units : str, optional", "magnitude : array Magnitude of the frequency response for node", "\"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": r_axis_label = \"<b>Amplitude (μ", "phase, frequency_range, number_dof): self.forced_resp = forced_resp self.magnitude = magnitude self.phase", "- X</b>\"), showspikes=False), zaxis=dict(title=dict(text=\"<b>Amplitude - Y</b>\"), showspikes=False), ), ) return", "row=2, col=1) fig.update_yaxes(fig1.layout.yaxis, row=2, col=1) fig.update_layout( polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ),", "fig.add_trace(data, row=1, col=2) fig.update_xaxes(fig0.layout.xaxis, row=1, col=1) fig.update_yaxes(fig0.layout.yaxis, row=1, col=1) fig.update_xaxes(fig1.layout.xaxis,", "color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase: %{y:.2f}\"), **kwargs,", "2}], [{}, None]] ) for data in fig0[\"data\"]: subplots.add_trace(data, row=1,", "np.percentile(self.log_dec[j], 50 + p / 2, axis=1) p2 = np.percentile(self.log_dec[j],", "x=self.nodes_pos, y=line, z=line, line=dict(width=2.0, color=\"black\", dash=\"dashdot\"), showlegend=False, mode=\"lines\", ) )", "using bokeh. Parameters ---------- percentile : list, optional Sequence of", "Number of degrees of freedom per shaft element's node. Returns", "x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter(", "\"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": y_axis_label = \"<b>Amplitude (μ", "%{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), **kwargs, ) ) for i,", "for data in fig1[\"data\"]: subplots.add_trace(data, 1, 2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)", "yout : array System response. xout : array Time evolution", "plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs): \"\"\"Plot frequency response. This", "a specific natural frequency. Two options for plooting are available:", "fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.phase, axis=1), opacity=1.0, name=\"Mean\",", "line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase: %{y:.2f}\"),", "fig.update_layout(title=\"<b>Rotor Orbit: node {}</b>\".format(node)), return fig def plot_3d(self, percentile=[], conf_interval=[],", "and an input using bokeh. 
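A vectorized sketch of the phase wrapping used by the probe-based phase plots in this module, equivalent to the list comprehension applied internally, which maps negative angles into ``[0, 2*pi)`` (the input array is illustrative):

>>> import numpy as np
>>> phase = np.array([-0.5, 0.3, -2.0])  # radians
>>> wrapped = np.where(phase < 0, phase + 2 * np.pi, phase)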
Parameters ---------- percentile : list,", "in enumerate(conf_interval): p1 = np.percentile(self.yout[..., ndof * node], 50 +", "node. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with", "conf_interval=[], fig=None, units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot frequency response. This method", "more information. Returns ------- fig : Plotly graph_objects.Figure() Bokeh plot", "p / 2, axis=0) p2 = np.percentile(probe_phase, 50 - p", "self, probe, percentile=[], conf_interval=[], fig=None, units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot polar", "fig0[\"data\"]: data.showlegend = False fig.add_trace(data, row=1, col=1) for data in", "- confidence interval: {p}%\", legendgroup=f\"Probe {i + 1} - confidence", "row=2, col=1) fig.update_layout( polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), ) return fig", "axis=0), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\"", "**kwargs, ) ) fig.update_xaxes(title_text=\"<b>Amplitude</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") fig.update_layout(title=\"<b>Rotor Orbit: node {}</b>\".format(node)), return", "\"Phase: %{y:.2f}\"), **kwargs, ) ) for i, p in enumerate(percentile):", "excite a specific natural frequency. Two options for plooting are", "+ p / 2, axis=1) p2 = np.percentile(self.magnitude, 50 -", "with the frequencies, phase of the frequency response for each", "go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i],", "Array with the frequencies, magnitude (dB) of the frequency response", "np.concatenate((self.speed_range, self.speed_range[::-1])) for j, h in enumerate(harmonics): fig.add_trace( go.Scatter( x=self.speed_range,", "---------- node : int Select the node to display the", "on fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]),", "in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j], p, axis=1), opacity=0.6, line=dict(width=2.5,", "in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(probe_resp, p, axis=0), theta=np.percentile(probe_phase, p, axis=0),", "in enumerate(conf_interval): p1 = np.percentile(self.log_dec[j], 50 + p / 2,", "v) for i, p in enumerate(probe): dofx = p[0] *", "y=np.percentile(self.phase, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency:", "...) *See Plotly Python Figure Reference for more information. Returns", "fig.update_xaxes(fig1.layout.xaxis, row=2, col=1) fig.update_yaxes(fig1.layout.yaxis, row=2, col=1) fig.update_layout( polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis,", "+ 1} - Mean\", legendgroup=f\"Probe {i + 1} - Mean\",", "frequencies vs frequency. Parameters ---------- percentile : list, optional Sequence", "of tuples List with tuples (node, orientation angle). node :", "plotting module. 
This module returns graphs for each type of", "------- subplots : Plotly graph_objects.make_subplots() Plotly figure with diagrams for", "legendgroup=\"conf_interval{}\".format(p), showlegend=True if j == 0 else False, hovertemplate=( \"Nodal", "+ 1], axis=0), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), hovertemplate=( \"X -", "j + 1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency:", "other speeds which also excite a specific natural frequency. Two", "array Array with the Logarithmic decrement Returns ------- subplots :", "n + 1], axis=0), line=dict(width=5, color=\"black\"), name=\"Mean\", legendgroup=\"mean\", showlegend=True if", "np.percentile(self.phase, 50 - p / 2, axis=1) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1,", "- Mode {}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j, i), hoverinfo=\"none\", **kwargs,", "\"<b>Amplitude (dB)</b>\" for k, v in default_values.items(): kwargs.setdefault(k, v) fig", "self, probe, percentile=[], conf_interval=[], fig=None, *args, **kwargs ): \"\"\"Plot time", "orientation angle). node : int indicate the node where the", ") ) fig.update_layout( scene=dict( xaxis=dict(title=dict(text=\"<b>Rotor Length</b>\"), showspikes=False), yaxis=dict(title=dict(text=\"<b>Amplitude - X</b>\"),", "fmt: on fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3,", "\"Y - Amplitude: %{z:.2e}\" ), **kwargs, ) ) fig.add_trace( go.Scatter3d(", "object with the plot. \"\"\" ndof = self.number_dof default_values =", "title_text=y_axis_label, title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, )", "name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" +", "** 2 + (_probe_resp[1] * np.sin(angle)) ** 2) # fmt:", "bokeh Parameters ---------- speed_range : array Array with the speed", "to display the respective orbit response. percentile : list, optional", "else: y_axis_label = \"<b>Amplitude (dB)</b>\" default_values = dict(mode=\"lines\") conf_interval =", "np.pi if i < 0 else i for i in", "{}%\".format(p), legendgroup=\"conf{}\".format(i), **kwargs, ) ) fig.update_layout( polar=dict( radialaxis=dict( title_text=r_axis_label, title_font=dict(family=\"Arial\",", "p[1] # fmt: off operator = np.array( [[np.cos(angle), - np.sin(angle)],", "50 + p / 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6,", "size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return subplots class ST_FrequencyResponseResults:", "np.percentile(self.magnitude, 50 - p / 2, axis=1) p3 = np.percentile(self.phase,", "Amplitude: %{y:.2e}\" ), **kwargs, ) ) for i, p in", "np.sort(percentile) if fig is None: fig = go.Figure() color_i =", "go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], 50 + p /", "+ 1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) )", "compute, which must be between 0 and 100 inclusive. conf_interval", "100 inclusive. 
args: optional harmonics : list, optional List with", "\"Y - Amplitude: %{y:.2e}\" ), **kwargs, ) ) for i,", ") ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range,", "tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=y_axis_label, title_font=dict(family=\"Arial\",", "percentile=[], conf_interval=[], fig=None, *args, **kwargs): \"\"\"Plot orbit response (2D). This", "be passed to change the plot layout (e.g. width=800, height=600,", ") ) for j, p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(probe_resp,", "bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return fig def plot_log_dec(self, percentile=[],", "fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.magnitude, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile:", "fig def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs): \"\"\"Plot frequency", "+ \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\", size=20),", "array Rotor nodes axial positions. number_dof : int Number of", ": Plotly graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase", "plot. \"\"\" default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile =", "col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1) subplots.update_layout( plot_bgcolor=\"white\", polar_bgcolor=\"white\",", "{}</b>\".format(node)), return fig def plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs):", "with the plot. \"\"\" if fig is None: fig =", "= np.concatenate((self.speed_range, self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range,", ") for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j],", "<reponame>JuliaMota/ross<filename>ross/stochastic/st_results.py<gh_stars>0 \"\"\"STOCHASTIC ROSS plotting module. This module returns graphs for", "This method plots the time response given a tuple of", "optional List with the harmonics to be plotted. The default", "to change the plot layout (e.g. width=800, height=600, ...). *See", "), ) return subplots class ST_FrequencyResponseResults: \"\"\"Store stochastic results and", "which must be between 0% and 100% inclusive. fig :", "to be plotted. The default is to plot 1x. kwargs", "provide plots for Frequency Response. Parameters ---------- speed_range : array", "for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.phase, p,", "in rad/s. wd : array Array with the damped natural", "from ross.plotly_theme import tableau_colors pio.renderers.default = \"browser\" # set Plotly", "an output and an input using Plotly. 
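The trace pattern behind this method, a mean line plus percentile lines, can be sketched with illustrative data as:

>>> import numpy as np
>>> import plotly.graph_objects as go
>>> speed = np.linspace(0, 400, 80)  # rad/s
>>> rng = np.random.default_rng(0)
>>> mag = np.abs(rng.normal(1e-5, 1e-6, size=(80, 500)))  # (n_freqs, n_samples)
>>> fig = go.Figure()
>>> fig = fig.add_trace(go.Scatter(x=speed, y=np.mean(mag, axis=1), name="Mean"))
>>> fig = fig.add_trace(go.Scatter(x=speed, y=np.percentile(mag, 95, axis=1), name="percentile: 95%"))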
Parameters ---------- percentile", "2, axis=1) p2 = np.percentile(self.log_dec[j], 50 - p / 2,", ") fig.update_yaxes( title_text=\"<b>Phase Angle</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5,", "theta=np.mean(self.phase, axis=1), customdata=self.speed_range, thetaunit=\"radians\", line=dict(width=3.0, color=\"black\"), name=\"Mean\", legendgroup=\"mean\", hovertemplate=( \"<b>Amplitude:", "- p / 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence", "nodes_pos): self.time_range = time_range self.yout = yout self.xout = xout", "= phs[:, p[0] * self.number_dof] probe_phase[i] = np.array( [i +", "int Degree of freedom. units : str Magnitude unit system.", "y=np.percentile(self.magnitude, p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency:", "x=x, y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval:", "self.speed_range = speed_range self.wd = wd self.log_dec = log_dec def", "output and an input. Parameters ---------- percentile : list, optional", "check other speeds which also excite a specific natural frequency.", "line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), **kwargs,", "= go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3,", "** 2) # fmt: on probe_phase = np.zeros_like(self.phase[:, :, 0])", "fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude:", "* np.pi if i < 0 else i for i", "go.Figure() x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace(", "+= 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=\"<b>Phase Angle</b>\") fig.update_layout(**kwargs), return fig def plot_polar_bode(", "self.plot_polar_bode(probe, percentile, conf_interval, units=units, **kwargs) if fig is None: fig", "dofy])) probe_resp[i] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 + (_probe_resp[1]", "of freedom to observe the response. percentile : list, optional", "i), hoverinfo=\"none\", **kwargs, ) ) for i, p in enumerate(conf_interval):", "+ (_probe_resp[1] * np.sin(angle)) ** 2) # fmt: on probe_phase", "go.Figure() fig.add_trace( go.Scatterpolar( r=np.mean(self.magnitude, axis=1), theta=np.mean(self.phase, axis=1), customdata=self.speed_range, thetaunit=\"radians\", line=dict(width=3.0,", "in a 2D view. Parameters ---------- node : int Select", "/ 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), mode=\"lines\", line=dict(width=1,", "%{y:.2f}\", ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter(", "i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=np.percentile(self.yout[..., ndof * node],", ": Plotly graph_objects.Figure() The figure object with the plot. units", "using Plotly. Parameters ---------- probe : list of tuples List", "for each pair input/output. 
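For orientation, the stored quantities relate to a complex frequency response as follows (``H`` is an arbitrary illustrative array):

>>> import numpy as np
>>> H = np.array([1 + 1j, 0.5j, 2.0])  # complex frequency response samples
>>> mag_db = 20 * np.log10(np.abs(H))  # magnitude in dB
>>> phase = np.angle(H)  # phase in radians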
Returns ------- subplots : Plotly graph_objects.make_subplots()", "size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return subplots class ST_TimeResponseResults:", "line=dict(width=3, color=\"black\"), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y -", "color_i += 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=\"<b>Phase Angle</b>\") fig.update_layout(**kwargs), return fig def", "p4 = np.percentile(probe_phase, 50 - p / 2, axis=0) fig.add_trace(", "\"browser\" # set Plotly palette of colors colors1 = px.colors.qualitative.Dark24", "hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) x =", "color=colors2[j]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hoverinfo=\"none\", **kwargs, ) ) for", "def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the damped natural", "line=dict(width=2.5, color=colors2[j]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" +", "on fig.add_trace( go.Scatter( x=self.time_range, y=np.mean(probe_resp, axis=0), opacity=1.0, name=f\"Probe {i +", "with the harmonics to be plotted. The default is to", "= operator @ np.vstack((mag[:, dofx], mag[:, dofy])) probe_resp[i] = np.sqrt((_probe_resp[0]", "col=1) for data in fig2[\"data\"]: subplots.add_trace(data, row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1,", "zaxis=dict(title=dict(text=\"<b>Amplitude - Y</b>\"), showspikes=False), ), ) return fig class ST_ForcedResponseResults:", ") for j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_phase,", "\"\"\"Plot polar forced response using Plotly. Parameters ---------- probe :", "subplots : Plotly graph_objects.make_subplots() Plotly figure with diagrams for frequency", "List with tuples (node, orientation angle). node : int indicate", ") for i, p in enumerate(percentile): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[...,", "Magnitude unit system. Default is \"mic-pk-pk\" polar_kwargs : optional Additional", "fig def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the log_dec", "customdata=self.frequency_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i + 1} -", ": list of tuples List with tuples (node, orientation angle).", "), ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatterpolar(", "+ 1), legendgroup=\"conf{}{}\".format(j, i), hoverinfo=\"none\", **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Rotor", "response given a tuple of probes with their nodes and", "- confidence interval: {p}%\", ) ) color_i += 1 fig.update_layout(", "speed_range : array Array with the speed range in rad/s.", "j, y in enumerate(self.yout): _probe_resp = operator @ np.vstack((y[:, dofx],", "for data in fig0[\"data\"]: subplots.add_trace(data, 1, 1) for data in", "axis=0) p2 = np.percentile(probe_resp, 50 - p / 2, axis=0)", "self.speed_range = speed_range self.magnitude = magnitude self.phase = phase def", "go.Scatter( x=np.concatenate((p1, p2[::-1])), y=np.concatenate((p3, p4[::-1])), line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5,", "list of tuples List with tuples (node, orientation angle). 
node", "in fig1[\"data\"]: subplots.add_trace(data, 1, 2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=1,", "---------- time_range : 1-dimensional array Time array. yout : array", "title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes(", "input/output. Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with", "1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=y_axis_label) fig.update_layout(**kwargs) return fig def plot_phase(self, probe, percentile=[],", "name=\"confidence interval: {}%\".format(p), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y", "+ 1], 50 + p / 2, axis=0) p4 =", "fig : Plotly graph_objects.Figure() The figure object with the plot.", "Array with the frequencies, phase of the frequency response for", "subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis,", "object with the plot. \"\"\" if fig is None: fig", "\"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes(title_text=\"<b>Time (s)</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") return fig", "if i < 0 else i for i in aux_phase]", "p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5, name=f\"Probe", "p in enumerate(conf_interval): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n],", "legendgroup=\"percentile{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\" ), **kwargs,", "title_text=r_axis_label, title_font=dict(family=\"Arial\", size=14), gridcolor=\"lightgray\", exponentformat=\"power\", ), angularaxis=dict( tickfont=dict(size=14), gridcolor=\"lightgray\", linecolor=\"black\",", "( _probe_resp[0] * np.cos(angle) ** 2 + _probe_resp[1] * np.sin(angle)", "inclusive. 
harmonics: list, optional List withe the harmonics to be", "self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot amplitude vs frequency.", "size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Logarithmic", "mirror=True, ) fig.update_yaxes( title_text=y_axis_label, title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5,", "given a tuple of probes with their nodes and orientations.", "go.Figure() fig.add_trace( go.Scatter( x=np.mean(self.yout[..., ndof * node], axis=0), y=np.mean(self.yout[..., ndof", "cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}], [{}, None]] ) #", "+ p / 2, axis=1) p4 = np.percentile(self.phase, 50 -", "Sequence of confidence intervals to compute, which must be between", "2, axis=1) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1,", "percentile, conf_interval, units=units, **kwargs) fig1 = self.plot_phase(probe, percentile, conf_interval, **kwargs)", "for k, v in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_log_dec(percentile,", "%{x:.2f}<br>\" + \"Phase: %{y:.2f}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\",", "- Mode {}\".format(j + 1), line=dict(width=3, color=colors1[j]), legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\"", "**kwargs): \"\"\"Plot frequency response. This method plots the frequency and", "of the frequency response for node for each frequency. phase", "p, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i +", "fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_phase, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe", "borderwidth=2, ), ) return fig def plot_polar_bode( self, percentile=[], conf_interval=[],", "= np.percentile(probe_phase, 50 - p / 2, axis=0) fig.add_trace( go.Scatter(", "* node], p, axis=0), y=np.percentile(self.yout[..., ndof * node + 1],", "\"\"\" import numpy as np from plotly import express as", "is None: fig = go.Figure() default_values = dict(mode=\"lines\") conf_interval =", "in fig0[\"data\"]: subplots.add_trace(data, row=1, col=1) for data in fig1[\"data\"]: subplots.add_trace(data,", "= p[1] # fmt: off operator = np.array( [[np.cos(angle), -", "ndof * node + 1], axis=0), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"),", "\"\"\" fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs) default_values = dict(showlegend=False)", "{}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=True if j == 0 else False, hovertemplate=(", "Plotly graph_objects.Figure() The figure object with the plot. \"\"\" conf_interval", "{}\".format(j + 1), line=dict(width=3, color=colors1[j]), legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency:", "fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\",", "== \"m\": r_axis_label = \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\":", "positions. 
number_dof : int Number of degrees of freedom per", "chooses between them using the attribute plot_type. The default is", "List withe the harmonics to be plotted. The default is", "axis=0), opacity=1.0, name=\"percentile: {}%\".format(p), line=dict(width=3, color=colors1[i]), legendgroup=\"perc{}\".format(p), showlegend=True if j", "magnitude self.phase = phase def plot_magnitude( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\",", "p in enumerate(probe): probe_phase = np.zeros_like(self.phase[:, :, 0]) for j,", "fig1 = self.plot_phase(percentile, conf_interval, **kwargs) fig2 = self.plot_polar_bode(percentile, conf_interval, units,", "\"<b>Frequency: %{customdata:.2f}</b>\" ), ) ) for j, p in enumerate(percentile):", "p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\"", "of confidence intervals to compute, which must be between 0", "\"\"\"Plot frequency response. This method plots the phase response given", "+ (_probe_resp[1] * np.sin(angle)) ** 2) # fmt: on fig.add_trace(", "legendgroup=f\"Probe {i + 1} - Mean\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", )", "= \"<b>Amplitude (μ pk-pk)</b>\" else: r_axis_label = \"<b>Amplitude (dB)</b>\" if", "can be passed to change the plot layout only (e.g.", "response magnitude. Parameters ---------- probe : list of tuples List", "**kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatterpolar(", "with the plot. \"\"\" if units == \"m\": y_axis_label =", "p[0] * self.number_dof] probe_phase[i] = np.array( [i + 2 *", "n + 1], 50 - p / 2, axis=0 ),", "{i + 1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", )", "interval: {p}%\", legendgroup=f\"Probe {i + 1} - confidence interval: {p}%\",", "- percentile: {p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "Plotly graph_objects.Figure() The figure object with the plot. units :", ") for i, p in enumerate(conf_interval): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[...,", "natural frequencies vs frequency. Parameters ---------- percentile : list, optional", "Phase of the frequency response for node for each frequency.", "[[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.magnitude[:,", "in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, mode=\"lines\",", "thetaunit=\"radians\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5, name=f\"Probe {i + 1}", "- percentile{p}\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency:", "in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() x = np.concatenate((self.speed_range,", "for i, p in enumerate(conf_interval): p1 = np.percentile(self.phase, 50 +", "opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase:", "color=colors1[j]), fill=\"toself\", fillcolor=colors1[j], opacity=0.3, name=\"confidence interval: {}% - Mode {}\".format(p,", "color_i += 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=y_axis_label) fig.update_layout(**kwargs) return fig def plot_phase(self,", "for each frequency. 
number_dof = int Number of degrees of", "line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=True if j", "* self.number_dof + 1 angle = p[1] # fmt: off", "+ p / 2, axis=1) p2 = np.percentile(self.wd[j], 50 -", "= go.Figure() color_i = 0 color_p = 0 for i,", "2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])), mode=\"lines\", line=dict(width=1, color=colors2[color_i]),", "axis=1) p2 = np.percentile(self.wd[j], 50 - p / 2, axis=1)", "\"<b>Amplitude (μ pk-pk)</b>\" else: y_axis_label = \"<b>Amplitude (dB)</b>\" default_values =", "is None: fig = make_subplots( rows=2, cols=2, specs=[[{}, {\"type\": \"polar\",", "color_i += 1 fig.update_layout( polar=dict( radialaxis=dict(title_text=r_axis_label, exponentformat=\"E\"), angularaxis=dict(exponentformat=\"E\"), ), **kwargs,", "go.Scatter3d( x=x, y=np.mean(self.yout[..., ndof * n], axis=0), z=np.mean(self.yout[..., ndof *", "default is bokeh Parameters ---------- speed_range : array Array with", "**kwargs, ): \"\"\"Plot amplitude vs frequency. This method plots the", "Amplitude: %{z:.2e}\" ), **kwargs, ) ) fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[...,", ": Plotly graph_objects.Figure() The figure object with the plot. kwargs", "inclusive. args: optional harmonics : list, optional List with the", "return subplots class ST_TimeResponseResults: \"\"\"Store stochastic results and provide plots", "pk-pk)</b>\" else: r_axis_label = \"<b>Amplitude (dB)</b>\" if fig is None:", "%{y:.2e}\"), **kwargs, ) ) x = np.concatenate((self.time_range, self.time_range[::-1])) for j,", "i, p in enumerate(conf_interval): p1 = np.percentile(self.magnitude, 50 + p", "np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p in enumerate(conf_interval): p1 = np.percentile(self.phase,", "go.Scatter3d( x=self.nodes_pos, y=line, z=line, line=dict(width=2.0, color=\"black\", dash=\"dashdot\"), showlegend=False, mode=\"lines\", )", "the frequency response for node for each frequency. 
number_dof =", "/ 2, axis=1) p2 = np.percentile(self.phase, 50 - p /", "p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.magnitude, p, axis=1), opacity=0.6,", "np.percentile(probe_resp, 50 - p / 2, axis=0) fig.add_trace( go.Scatter( x=x,", "50 + p / 2, axis=0) p4 = np.percentile(self.yout[..., ndof", "Mean\", legendgroup=f\"Probe {i + 1} - Mean\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\",", "fig.update_yaxes( title_text=\"<b>Phase Angle</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\",", "self.time_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50", "enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_phase, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5,", "number_dof = int Number of degrees of freedom per shaft", "row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2,", "enumerate(percentile): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], p, axis=0),", "aux_phase = phs[:, p[0] * self.number_dof] probe_phase[i] = np.array( [i", "go.Scatter( x=self.frequency_range, y=np.percentile(probe_phase, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe", "percentile=[], conf_interval=[], **kwargs): \"\"\"Plot phase angle response. This method plots", "0 and 100 inclusive. conf_interval : list, optional Sequence of", "(μ pk-pk)</b>\" else: r_axis_label = \"<b>Amplitude (dB)</b>\" if fig is", "tuples (node, orientation angle). node : int indicate the node", "- Mean\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) for j, p", "x=self.frequency_range, y=np.mean(probe_phase, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe {i +", "of confidence intervals to compute, which must be between 0%", "plots for Forced Response. Parameters ---------- force_resp : array Array", "- p / 2, axis=0) p3 = np.percentile(probe_phase, 50 +", "= np.sort(conf_interval) percentile = np.sort(percentile) if units == \"m\": r_axis_label", "fmt: on fig.add_trace( go.Scatter( x=self.time_range, y=np.mean(probe_resp, axis=0), opacity=1.0, name=f\"Probe {i", "percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the damped natural frequencies vs", "phase response given a set of probes. 
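A simplified sketch of reading one probe: the probe tuple selects a node and an orientation angle, the node's lateral DOFs are indexed through ``number_dof``, and the x/y responses are projected onto the probe direction (the class applies its own rotation operator internally; the numbers below are illustrative):

>>> import numpy as np
>>> number_dof = 4  # DOFs per shaft element's node
>>> node, angle = 3, np.pi / 4  # probe tuple: (node, orientation angle)
>>> dofx = node * number_dof  # index of the node's x translation
>>> dofy = node * number_dof + 1  # index of the node's y translation
>>> ux, uy = 1.0e-6, 2.0e-6  # response amplitudes at those DOFs
>>> u_probe = ux * np.cos(angle) + uy * np.sin(angle)  # simplified projection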
Parameters ---------- dof", "1), line=dict(width=3, color=colors1[j]), legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs,", ") ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=np.percentile(self.yout[...,", "\"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), ) ) color_p +=", "fillcolor=colors1[j], opacity=0.5, name=f\"Probe {i + 1} - confidence interval: {p}%\",", "= log_dec def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the", ") return fig def plot_polar_bode( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs,", "_probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy])) probe_resp[j] =", "2, axis=0) p2 = np.percentile(self.yout[..., ndof * node], 50 -", "{}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\" ),", "None: fig = go.Figure() default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval)", "color_p += 1 x = np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p", "given an output and an input. Parameters ---------- percentile :", "hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) for j, p in enumerate(percentile):", "fig.update_yaxes(title_text=\"<b>Phase Angle</b>\") fig.update_layout(**kwargs), return fig def plot_polar_bode( self, probe, percentile=[],", "linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Phase Angle</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16),", "p / 2, axis=0) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])),", "This method plots the frequency response magnitude given an output", "x=np.mean(self.yout[..., ndof * node], axis=0), y=np.mean(self.yout[..., ndof * node +", "v) fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs) subplots = make_subplots(rows=1,", "+ \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), **kwargs, ) )", "units=units, **kwargs) if fig is None: fig = make_subplots( rows=2,", "evolution of the state vector. nodes_list: array list with nodes", "y=np.percentile(probe_resp, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i +", "Plotly graph_objects.Figure() The figure object with the plot. \"\"\" default_values", "System response. xout : array Time evolution of the state", "enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.wd[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]),", "y=np.concatenate((p1, p2[::-1])), line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p),", "**kwargs) if fig is None: fig = make_subplots( rows=2, cols=2,", "ROSS plotting module. This module returns graphs for each type", "units=\"mic-pk-pk\", **kwargs): \"\"\"Plot frequency response. This method plots the frequency", "axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe {i + 1} -", "px.colors.qualitative.Light24 class ST_CampbellResults: \"\"\"Store stochastic results and provide plots for", "are available: Matplotlib and Bokeh. The user chooses between them", "plot(self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs): \"\"\"Plot frequency response. 
This method", "p3 = np.percentile(self.phase, 50 + p / 2, axis=1) p4", "The figure object with the plot. \"\"\" def __init__(self, time_range,", "frequency phase angle vs frequency. \"\"\" # fmt: off fig0", "of the frequency response for each pair input/output. Returns -------", "the rotor system in a 2D view. Parameters ---------- node", "plot_magnitude( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot amplitude vs", "0 else False, hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X -", "), **kwargs, ) return fig def plot( self, probe, percentile=[],", "= np.percentile(probe_resp, 50 - p / 2, axis=0) p3 =", "row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=2,", "plotly.subplots import make_subplots from ross.plotly_theme import tableau_colors pio.renderers.default = \"browser\"", "fmt: on probe_phase = np.zeros_like(self.phase[:, :, 0]) for j, phs", "on for data in fig0[\"data\"]: data.showlegend = False fig.add_trace(data, row=1,", "word arguments can be passed to change the plot (e.g.", "np.percentile(probe_resp, 50 + p / 2, axis=0) p2 = np.percentile(probe_resp,", "/ 2, axis=0) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\",", "log_dec def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the damped", "interval: {}% - Mode {}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j, i),", "x=x, y=np.mean(self.yout[..., ndof * n], axis=0), z=np.mean(self.yout[..., ndof * n", "Default is \"mic-pk-pk\". kwargs : optional Additional key word arguments", "and orientations. Parameters ---------- probe : list of tuples List", "Unit system Default is \"mic-pk-pk\" kwargs : optional Additional key", "xout self.nodes_list = nodes_list self.nodes_pos = nodes_pos self.number_dof = number_dof", "decrement</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, )", "+ \"Phase: %{y:.2f}\"), **kwargs, ) ) for i, p in", "array Array with the force response for each node for", "the harmonics to be plotted. The default is to plot", "= np.percentile(self.magnitude, 50 + p / 2, axis=1) p2 =", "and 100% inclusive. 
units : str, optional Unit system Default", "x=np.percentile(self.yout[..., ndof * node], p, axis=0), y=np.percentile(self.yout[..., ndof * node", "r=np.mean(probe_resp, axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", mode=\"lines\", line=dict(width=3.0, color=list(tableau_colors)[i]), name=f\"Probe", "log_dec : array Array with the Logarithmic decrement Returns -------", "opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe {i + 1} - Mean\",", "mode=\"lines\", line=dict(width=3.0, color=list(tableau_colors)[i]), name=f\"Probe {i + 1} - Mean\", legendgroup=f\"Probe", "None: fig = go.Figure() color_p = 0 color_i = 0", "make_subplots( rows=2, cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}], [{}, None]]", "{p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) color_p += 1 x", "theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5, name=f\"Probe {i", "**kwargs, ) ) fig.update_xaxes(title_text=\"<b>Time (s)</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") return fig def plot_2d(self,", "= 0 color_i = 0 for i, p in enumerate(probe):", "polar forced response using Plotly. Parameters ---------- probe : list", "Position: %{x:.2f}<br>\" + \"X - Amplitude: %{y:.2e}<br>\" + \"Y -", "1} - Mean\", legendgroup=f\"Probe {i + 1} - Mean\", hovertemplate=(", "orientation : float, probe orientation angle about the shaft. The", "mode=\"lines\", ) ) for j, n in enumerate(self.nodes_list): x =", "= xout self.nodes_list = nodes_list self.nodes_pos = nodes_pos self.number_dof =", "= number_dof def plot_1d( self, probe, percentile=[], conf_interval=[], fig=None, *args,", "%{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) x = np.concatenate((self.speed_range,", "str Magnitude unit system. Default is \"mic-pk-pk\" polar_kwargs : optional", "%{x:.2f}<br>\" + \"Phase: %{y:.2f}\"), **kwargs, ) ) for i, p", "1), legendgroup=\"conf{}{}\".format(j, i), hoverinfo=\"none\", **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Rotor speed</b>\",", "conf_interval, units, **kwargs) subplots = make_subplots( rows=2, cols=2, specs=[[{}, {\"type\":", "orientations. Parameters ---------- probe : list of tuples List with", "freedom. units : str Magnitude unit system. Default is \"mic-pk-pk\"", "1x. kwargs : optional Additional key word arguments can be", "+ p / 2, axis=0) p2 = np.percentile(probe_resp, 50 -", ": list, optional Sequence of confidence intervals to compute, which", "fig def plot(self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs): \"\"\"Plot frequency response.", "2) # fmt: on fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0,", "to visualize multiples harmonics in a single plot to check", "fig.update_layout( width=1200, height=900, plot_bgcolor=\"white\", legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2,", "name=f\"Probe {i + 1} - percentile: {p}%\", legendgroup=f\"Probe {i +", "This method plots the phase response given a set of", "- Mean\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) for j, p", "1} - confidence interval: {p}%\", legendgroup=f\"Probe {i + 1} -", "borderwidth=2, ), ) return fig def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1],", ": str Magnitude unit system. 
Default is \"mic-pk-pk\" polar_kwargs :", "__init__(self, speed_range, magnitude, phase): self.speed_range = speed_range self.magnitude = magnitude", "+ \"X - Amplitude: %{y:.2e}<br>\" + \"Y - Amplitude: %{z:.2e}\"", "fig is None: fig = make_subplots( rows=2, cols=2, specs=[[{}, {\"type\":", "+ 1} - confidence interval: {p}%\", ) ) color_i +=", "ndof * node + 1], 50 + p / 2,", "int Number of degrees of freedom per shaft element's node.", "%{x:.2f}<br>Phase: %{y:.2f}\", ) ) for j, p in enumerate(percentile): fig.add_trace(", "import tableau_colors pio.renderers.default = \"browser\" # set Plotly palette of", "customdata=self.speed_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude:", "row=1, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=2) subplots.update_yaxes(fig1.layout.yaxis, row=1,", "probes with their nodes and orientations. Parameters ---------- probe :", "= np.sort(percentile) if fig is None: fig = go.Figure() color_i", "x=x, y=np.percentile(self.yout[..., ndof * n], 50 - p / 2,", "p / 2, axis=0) p3 = np.percentile(self.yout[..., ndof * node", "an input. Parameters ---------- percentile : list, optional Sequence of", "the frequencies, magnitude (dB) of the frequency response for each", "given an output and an input using Plotly. Parameters ----------", "width=1800, height=900, polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\",", "= number_dof def plot_magnitude( self, probe, percentile=[], conf_interval=[], fig=None, units=\"mic-pk-pk\",", "j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j], axis=1), opacity=1.0, name=\"Mean", "(3D). This function plots orbits for each node on the", ": optional Additional plot axes kwargs : optional Additional key", "node for each frequency. number_dof = int Number of degrees", "bordercolor=\"black\", borderwidth=2, ), ) return fig def plot(self, percentile=[], conf_interval=[],", "= np.zeros_like(self.magnitude[:, :, 0]) for j, mag in enumerate(self.magnitude): _probe_resp", "Returns ------- fig : Plotly graph_objects.Figure() Bokeh plot axes with", "fig.add_trace( go.Scatter( x=self.time_range, y=np.mean(probe_resp, axis=0), opacity=1.0, name=f\"Probe {i + 1}", "color_p += 1 for j, p in enumerate(conf_interval): p1 =", "2, axis=0) p2 = np.percentile(probe_phase, 50 - p / 2,", "magnitude (dB) of the frequency response for each pair input/output.", "col=2) subplots.update_layout( plot_bgcolor=\"white\", width=1800, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\",", "legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\"", "default_values.items(): kwargs.setdefault(k, v) if fig is None: fig = go.Figure()", "fig.update_layout(**kwargs) return fig def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs):", "if fig is None: fig = go.Figure() default_values = dict(mode=\"lines\")", "figure object with the plot. 
class ST_CampbellResults:
    """Store stochastic results and provide plots for the Campbell Diagram.

    It is possible to visualize multiple harmonics in a single plot to check
    other speeds which also excite a specific natural frequency.

    All plots in this module are drawn with Plotly.

    Parameters
    ----------
    speed_range : array
        Array with the speed range in rad/s.
    wd : array
        Array with the damped natural frequencies.
    log_dec : array
        Array with the logarithmic decrement.

    Returns
    -------
    subplots : Plotly graph_objects.make_subplots()
        Plotly figure with diagrams for frequency and log dec.
    """

    def __init__(self, speed_range, wd, log_dec):
        self.speed_range = speed_range
        self.wd = wd
        self.log_dec = log_dec
    def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs):
        """Plot the damped natural frequencies vs rotor speed.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        harmonics : list, optional
            List with the harmonics to be plotted. The default is to plot 1x.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        fig = go.Figure()
        x = np.concatenate((self.speed_range, self.speed_range[::-1]))

        # harmonic excitation lines (1x, 2x, ...)
        for j, h in enumerate(harmonics):
            fig.add_trace(
                go.Scatter(
                    x=self.speed_range,
                    y=self.speed_range * h,
                    mode="lines",
                    opacity=1.0,
                    name="{}x speed".format(h),
                    line=dict(width=3, color=colors1[j], dash="dashdot"),
                    hoverinfo="none",
                )
            )

        for j in range(self.wd.shape[0]):
            # mean damped natural frequency for each mode
            fig.add_trace(
                go.Scatter(
                    x=self.speed_range,
                    y=np.mean(self.wd[j], axis=1),
                    mode="lines",
                    opacity=1.0,
                    name="Mean - Mode {}".format(j + 1),
                    line=dict(width=3, color=colors1[j]),
                    legendgroup="mean{}".format(j),
                    hovertemplate="Frequency: %{x:.3f}<br>Frequency: %{y:.3f}",
                )
            )
            for i, p in enumerate(percentile):
                fig.add_trace(
                    go.Scatter(
                        x=self.speed_range,
                        y=np.percentile(self.wd[j], p, axis=1),
                        mode="lines",
                        opacity=0.6,
                        line=dict(width=2.5, color=colors2[j]),
                        name="percentile: {}%".format(p),
                        legendgroup="percentile{}{}".format(j, i),
                        hovertemplate="Frequency: %{x:.3f}<br>Frequency: %{y:.3f}",
                    )
                )
            for i, p in enumerate(conf_interval):
                p1 = np.percentile(self.wd[j], 50 + p / 2, axis=1)
                p2 = np.percentile(self.wd[j], 50 - p / 2, axis=1)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors1[j]),
                        fill="toself",
                        fillcolor=colors1[j],
                        opacity=0.3,
                        name="confidence interval: {}% - Mode {}".format(p, j + 1),
                        legendgroup="conf{}{}".format(j, i),
                        hovertemplate="Frequency: %{x:.3f}<br>Frequency: %{y:.3f}",
                    )
                )

        _style_axes(fig, "<b>Rotor speed</b>", "<b>Damped Natural Frequencies</b>")
        fig.update_layout(**kwargs)
        return fig
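    # Note on the confidence-interval traces above: each band is drawn as a
    # single closed polygon. Concatenating the upper percentile curve with the
    # reversed lower curve traces the outline forward and then back, and
    # fill="toself" shades the enclosed area. A minimal standalone sketch of
    # the same trick, with synthetic data that is not part of this module:
    #
    #     t = np.linspace(0, 1, 50)
    #     upper, lower = np.sin(t) + 0.1, np.sin(t) - 0.1
    #     band = go.Scatter(
    #         x=np.concatenate((t, t[::-1])),
    #         y=np.concatenate((upper, lower[::-1])),
    #         fill="toself",
    #     )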
    def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs):
        """Plot the logarithmic decrement vs rotor speed.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        harmonics : list, optional
            Kept for signature compatibility with plot_nat_freq.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        fig = go.Figure()
        x = np.concatenate((self.speed_range, self.speed_range[::-1]))

        for j in range(self.log_dec.shape[0]):
            fig.add_trace(
                go.Scatter(
                    x=self.speed_range,
                    y=np.mean(self.log_dec[j], axis=1),
                    mode="lines",
                    opacity=1.0,
                    name="Mean - Mode {}".format(j + 1),
                    line=dict(width=3, color=colors1[j]),
                    legendgroup="mean{}".format(j),
                    hovertemplate="Frequency: %{x:.3f}<br>Log Dec: %{y:.3f}",
                )
            )
            for i, p in enumerate(percentile):
                fig.add_trace(
                    go.Scatter(
                        x=self.speed_range,
                        y=np.percentile(self.log_dec[j], p, axis=1),
                        mode="lines",
                        opacity=0.6,
                        line=dict(width=2.5, color=colors2[j]),
                        name="percentile: {}%".format(p),
                        legendgroup="percentile{}{}".format(j, i),
                        hovertemplate="Frequency: %{x:.3f}<br>Log Dec: %{y:.3f}",
                    )
                )
            for i, p in enumerate(conf_interval):
                p1 = np.percentile(self.log_dec[j], 50 + p / 2, axis=1)
                p2 = np.percentile(self.log_dec[j], 50 - p / 2, axis=1)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors1[j]),
                        fill="toself",
                        fillcolor=colors1[j],
                        opacity=0.3,
                        name="confidence interval: {}% - Mode {}".format(p, j + 1),
                        legendgroup="conf{}{}".format(j, i),
                        hovertemplate="Frequency: %{x:.3f}<br>Log Dec: %{y:.3f}",
                    )
                )

        _style_axes(fig, "<b>Rotor speed</b>", "<b>Logarithmic decrement</b>")
        fig.update_layout(**kwargs)
        return fig
    def plot(self, percentile=[], conf_interval=[], *args, **kwargs):
        """Plot Campbell Diagram.

        This method plots the Campbell Diagram, with the damped natural
        frequencies and the log dec side by side.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        args : optional
            Extra arguments forwarded to plot_nat_freq and plot_log_dec
            (e.g. the harmonics list).
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).

        Returns
        -------
        subplots : Plotly graph_objects.make_subplots()
            Plotly figure with diagrams for frequency and log dec.
        """
        fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs)
        fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs)

        subplots = make_subplots(rows=1, cols=2)
        for data in fig0["data"]:
            subplots.add_trace(data, 1, 1)
        for data in fig1["data"]:
            data.showlegend = False
            subplots.add_trace(data, 1, 2)

        subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
        subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)
        subplots.update_xaxes(fig1.layout.xaxis, row=1, col=2)
        subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2)
        subplots.update_layout(
            plot_bgcolor="white",
            width=1800,
            height=900,
            legend=dict(
                font=dict(family="sans-serif", size=14),
                bgcolor="white",
                bordercolor="black",
                borderwidth=2,
            ),
        )
        return subplots
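
# A minimal usage sketch for ST_CampbellResults with synthetic arrays. The
# shapes (modes, speeds, samples) are assumptions inferred from the indexing
# above (self.wd[j] is sliced per mode and averaged over axis=1); real inputs
# come from st_rotor_assembly.py.
def _example_campbell():  # hypothetical helper, not part of the public API
    speed_range = np.linspace(0, 500, 51)
    rng = np.random.default_rng(0)
    wd = rng.uniform(80, 120, size=(2, 51, 10))  # 2 modes, 10 samples
    log_dec = rng.uniform(0.1, 1.0, size=(2, 51, 10))
    results = ST_CampbellResults(speed_range, wd, log_dec)
    return results.plot(percentile=[50], conf_interval=[95])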
\"\"\" import numpy as", "p[0] * self.number_dof + 1 angle = p[1] # fmt:", "i), hoverinfo=\"none\", **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Rotor speed</b>\", title_font=dict(family=\"Arial\", size=20),", "in enumerate(percentile): fig.add_trace( go.Scatter( x=np.percentile(self.yout[..., ndof * node], p, axis=0),", "the damped natural frequencies log_dec : array Array with the", "{}%\".format(p), line=dict(width=3, color=colors1[i]), legendgroup=\"perc{}\".format(p), showlegend=True if j == 0 else", "go.Scatter( x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe {i", "np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.yout[:, :, 0])", "px from plotly import graph_objects as go from plotly import", "y=np.mean(self.phase, axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\" +", "each node on the rotor system in a 3D view.", "fig.add_trace( go.Scatterpolar( r=np.mean(self.magnitude, axis=1), theta=np.mean(self.phase, axis=1), customdata=self.speed_range, thetaunit=\"radians\", line=dict(width=3.0, color=\"black\"),", "plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the log_dec vs frequency.", "plot_bgcolor=\"white\", width=1800, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ),", "50 - p / 2, axis=0) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])),", "fig = go.Figure() fig.add_trace( go.Scatter( x=np.mean(self.yout[..., ndof * node], axis=0),", "y=np.mean(self.log_dec[j], axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j + 1), line=dict(width=3,", "- percentile: {p}%\", legendgroup=f\"Probe {i + 1} - percentile: {p}%\",", "phs in enumerate(self.phase): aux_phase = phs[:, p[0] * self.number_dof] probe_phase[i]", "object with the plot. units : str, optional Unit system", "fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs) default_values = dict(showlegend=False) for", "be between 0 and 100 inclusive. units : str, optional", "state vector. nodes_list: array list with nodes from a rotor", "0 color_i = 0 for i, p in enumerate(probe): dofx", "legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs, ) ) for", "list, optional Sequence of confidence intervals to compute, which must", "50 + p / 2, axis=0) p2 = np.percentile(probe_resp, 50", "frequency response. This method plots the frequency and phase response", "\"mic-pk-pk\". 
kwargs : optional Additional key word arguments can be", "def plot_log_dec(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs): \"\"\"Plot the log_dec vs", "x=self.frequency_range, y=np.mean(probe_resp, axis=0), opacity=1.0, mode=\"lines\", line=dict(width=3, color=list(tableau_colors)[i]), name=f\"Probe {i +", "natural frequencies log_dec : array Array with the Logarithmic decrement", "graph_objects.make_subplots() Plotly figure with amplitude vs frequency phase angle vs", "axis=0), customdata=self.frequency_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i + 1}", "theta=np.percentile(probe_phase, p, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i", "of probes with their nodes and orientations. Parameters ---------- probe", "p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.phase, p, axis=1), opacity=0.6,", "p / 2, axis=1) p3 = np.percentile(self.phase, 50 + p", "v) fig1 = self.plot_phase(percentile, conf_interval, **kwargs) fig2 = self.plot_polar_bode(percentile, conf_interval,", "x = np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p in enumerate(conf_interval): p1", ") ) for j, n in enumerate(self.nodes_list): x = np.ones(self.yout.shape[1])", "\"\"\" default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile)", "time_range : 1-dimensional array Time array. yout : array System", "Default is \"mic-pk-pk\" polar_kwargs : optional Additional key word arguments", "and Bokeh. The user chooses between them using the attribute", "plooting are available: Matplotlib and Bokeh. The user chooses between", "of the state vector. nodes_list: array list with nodes from", "- Amplitude: %{x:.2e}<br>\" + \"Y - Amplitude: %{y:.2e}\" ), **kwargs,", "interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=False, hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X", "for k, v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure()", "kwargs : optional Additional key word arguments can be passed", "and 100 inclusive. harmonics: list, optional List withe the harmonics", "\"\"\" if units == \"m\": y_axis_label = \"<b>Amplitude (m)</b>\" elif", "pk-pk)</b>\" else: r_axis_label = \"<b>Amplitude (dB)</b>\" for k, v in", "Time array. yout : array System response. xout : array", "and 100% inclusive. fig : Plotly graph_objects.Figure() The figure object", "fig=None, **kwargs): \"\"\"Plot frequency response. This method plots the phase", "= \"<b>Amplitude (dB)</b>\" default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile", "line=dict(width=3.0, color=\"black\"), name=\"Mean\", legendgroup=\"mean\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\"", "must be between 0% and 100% inclusive. units : str,", "with nodes from a rotor model. 
    def plot_phase(self, percentile=[], conf_interval=[], **kwargs):
        """Plot phase angle response.

        This method plots the phase response given an output and an input.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0% and 100% inclusive.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the phase plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        fig = go.Figure()
        fig.add_trace(
            go.Scatter(
                x=self.speed_range,
                y=np.mean(self.phase, axis=1),
                mode="lines",
                opacity=1.0,
                name="Mean",
                line=dict(width=3, color="black"),
                legendgroup="mean",
                hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
            )
        )
        for i, p in enumerate(percentile):
            fig.add_trace(
                go.Scatter(
                    x=self.speed_range,
                    y=np.percentile(self.phase, p, axis=1),
                    mode="lines",
                    opacity=0.6,
                    line=dict(width=2.5, color=colors2[i]),
                    name="percentile: {}%".format(p),
                    legendgroup="percentile{}".format(i),
                    hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                )
            )

        x = np.concatenate((self.speed_range, self.speed_range[::-1]))
        for i, p in enumerate(conf_interval):
            p1 = np.percentile(self.phase, 50 + p / 2, axis=1)
            p2 = np.percentile(self.phase, 50 - p / 2, axis=1)
            fig.add_trace(
                go.Scatter(
                    x=x,
                    y=np.concatenate((p1, p2[::-1])),
                    mode="lines",
                    line=dict(width=1, color=colors1[i]),
                    fill="toself",
                    fillcolor=colors1[i],
                    opacity=0.5,
                    name="confidence interval: {}%".format(p),
                    legendgroup="conf{}".format(i),
                    hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                )
            )

        _style_axes(fig, "<b>Frequency</b>", "<b>Phase Angle</b>")
        fig.update_layout(**kwargs)
        return fig
\"\"\" if fig is", "self.wd = wd self.log_dec = log_dec def plot_nat_freq(self, percentile=[], conf_interval=[],", "r_axis_label = \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": r_axis_label =", "self.plot_magnitude(probe, percentile, conf_interval, units=units, **kwargs) fig1 = self.plot_phase(probe, percentile, conf_interval,", "r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5,", "): \"\"\"Plot frequency response. This method plots the frequency and", "kwargs.setdefault(k, v) if fig is None: fig = go.Figure() fig.add_trace(", "color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\"", "conf_interval=[], **kwargs): \"\"\"Plot phase angle response. This method plots the", "v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatterpolar(", "\"mic-pk-pk\" kwargs : optional Additional key word arguments can be", "for plooting are available: Matplotlib and Bokeh. The user chooses", "p / 2, axis=0) p2 = np.percentile(probe_resp, 50 - p", "go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], p, axis=0), z=np.percentile(self.yout[..., ndof", "Mode {}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\"", "size=14), gridcolor=\"lightgray\", exponentformat=\"power\", ), angularaxis=dict( tickfont=dict(size=14), gridcolor=\"lightgray\", linecolor=\"black\", linewidth=2.5, ),", "array Array with the speed range in rad/s. wd :", "axis=1) p2 = np.percentile(self.log_dec[j], 50 - p / 2, axis=1)", "polar=dict( radialaxis=dict( title_text=r_axis_label, title_font=dict(family=\"Arial\", size=14), gridcolor=\"lightgray\", exponentformat=\"power\", ), angularaxis=dict( tickfont=dict(size=14),", "/ 2, axis=0 ), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence interval: {}%\".format(p),", "%{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) for j, p", "percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) color_p += 1", "/ 2, axis=0) p3 = np.percentile(probe_phase, 50 + p /", "plot. \"\"\" if units == \"m\": y_axis_label = \"<b>Amplitude (m)</b>\"", "Parameters ---------- time_range : 1-dimensional array Time array. yout :", "v in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_phase(percentile, conf_interval, **kwargs)", "Parameters ---------- dof : int Degree of freedom. units :", "np.sort(percentile) if fig is None: fig = go.Figure() color_p =", "colors colors1 = px.colors.qualitative.Dark24 colors2 = px.colors.qualitative.Light24 class ST_CampbellResults: \"\"\"Store", "dash=\"dashdot\"), showlegend=False, mode=\"lines\", ) ) for j, n in enumerate(self.nodes_list):", "the phase response given an output and an input using", ") ) for j in range(self.wd.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.wd[j],", "- p / 2, axis=0) fig.add_trace( go.Scatter( x=x, y=np.concatenate((p1, p2[::-1])),", "conf_interval=[], fig=None, *args, **kwargs): \"\"\"Plot orbit response (2D). 
    def plot(self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs):
        """Plot frequency response.

        This method plots the frequency and phase response given an output
        and an input.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0% and 100% inclusive.
        units : str, optional
            Unit system. Default is "mic-pk-pk".
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        subplots : Plotly graph_objects.make_subplots()
            Plotly figure with amplitude vs frequency phase angle vs frequency.
        """
        fig0 = self.plot_magnitude(percentile, conf_interval, units, **kwargs)
        fig1 = self.plot_phase(percentile, conf_interval, **kwargs)
        fig2 = self.plot_polar_bode(percentile, conf_interval, units, **kwargs)

        subplots = make_subplots(
            rows=2,
            cols=2,
            specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]],
        )
        for data in fig0["data"]:
            data.showlegend = False
            subplots.add_trace(data, row=1, col=1)
        for data in fig1["data"]:
            data.showlegend = False
            subplots.add_trace(data, row=2, col=1)
        for data in fig2["data"]:
            subplots.add_trace(data, row=1, col=2)

        subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
        subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)
        subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1)
        subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1)
        subplots.update_layout(
            plot_bgcolor="white",
            width=1800,
            height=900,
            polar=dict(
                bgcolor="white",
                radialaxis=fig2.layout.polar.radialaxis,
                angularaxis=fig2.layout.polar.angularaxis,
            ),
            legend=dict(
                font=dict(family="sans-serif", size=14),
                bgcolor="white",
                bordercolor="black",
                borderwidth=2,
            ),
        )
        return subplots
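
# Usage sketch for ST_FrequencyResponseResults (illustrative only). magnitude
# and phase are assumed to be (len(speed_range), n_samples) arrays, matching
# the axis=1 statistics computed above.
def _example_frequency_response():  # hypothetical helper, not public API
    speed_range = np.linspace(0, 400, 101)
    rng = np.random.default_rng(1)
    magnitude = np.abs(rng.normal(1.0, 0.1, size=(101, 20)))
    phase = rng.uniform(-np.pi, np.pi, size=(101, 20))
    results = ST_FrequencyResponseResults(speed_range, magnitude, phase)
    return results.plot(percentile=[50], conf_interval=[90])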
class ST_TimeResponseResults:
    """Store stochastic results and provide plots for Time Response and
    Orbit Response.

    Parameters
    ----------
    time_range : 1-dimensional array
        Time array.
    yout : array
        System response.
    xout : array
        Time evolution of the state vector.
    number_dof : int
        Number of degrees of freedom per shaft element's node.
    nodes_list : array
        List with nodes from a rotor model.
    nodes_pos : array
        Rotor nodes axial positions.

    Returns
    -------
    fig : Plotly graph_objects.Figure()
        The figure object with the plot.
    """

    def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos):
        self.time_range = time_range
        self.yout = yout
        self.xout = xout
        self.nodes_list = nodes_list
        self.nodes_pos = nodes_pos
        self.number_dof = number_dof

    def plot_1d(self, probe, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
        """Plot time response.

        This method plots the time response given a tuple of probes with
        their nodes and orientations.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle).
            node : int -> indicates the node where the probe is located.
            orientation : float -> probe orientation angle, in radians,
            where 0 refers to the +X direction.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        for i, p in enumerate(probe):
            dofx = p[0] * self.number_dof
            dofy = p[0] * self.number_dof + 1
            angle = p[1]

            # rotate the (x, y) DOF pair of the probed node by the probe angle
            # fmt: off
            operator = np.array(
                [[np.cos(angle), -np.sin(angle)],
                 [np.cos(angle), +np.sin(angle)]]
            )
            # fmt: on

            probe_resp = np.zeros_like(self.yout[:, :, 0])
            for j, y in enumerate(self.yout):
                _probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy]))
                probe_resp[j] = (
                    _probe_resp[0] * np.cos(angle) ** 2
                    + _probe_resp[1] * np.sin(angle) ** 2
                )

            fig.add_trace(
                go.Scatter(
                    x=self.time_range,
                    y=np.mean(probe_resp, axis=0),
                    mode="lines",
                    opacity=1.0,
                    name=f"Probe {i + 1} - Mean",
                    line=dict(width=3.0),
                    hovertemplate="Time: %{x:.3f}<br>Amplitude: %{y:.2e}",
                )
            )
            for pc in percentile:
                fig.add_trace(
                    go.Scatter(
                        x=self.time_range,
                        y=np.percentile(probe_resp, pc, axis=0),
                        mode="lines",
                        opacity=0.6,
                        line=dict(width=2.5),
                        name=f"Probe {i + 1} - percentile: {pc}%",
                        hovertemplate="Time: %{x:.3f}<br>Amplitude: %{y:.2e}",
                    )
                )

            x = np.concatenate((self.time_range, self.time_range[::-1]))
            for j, ci in enumerate(conf_interval):
                p1 = np.percentile(probe_resp, 50 + ci / 2, axis=0)
                p2 = np.percentile(probe_resp, 50 - ci / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors1[j]),
                        fill="toself",
                        fillcolor=colors1[j],
                        opacity=0.3,
                        name=f"Probe {i + 1} - confidence interval: {ci}%",
                        hovertemplate="Time: %{x:.3f}<br>Amplitude: %{y:.2e}",
                    )
                )

        fig.update_xaxes(title_text="<b>Time (s)</b>")
        fig.update_yaxes(title_text="<b>Amplitude</b>")
        fig.update_layout(**kwargs)
        return fig
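    # The block above builds the probe response from the x and y DOFs of the
    # probed node: the 2x2 operator rotates the (x, y) displacement pair by
    # the probe orientation, and the cos**2 / sin**2 weights recombine the two
    # rotated components into a single scalar history along the probe
    # direction. The textbook planar projection for a probe at angle a would
    # be x*np.cos(a) + y*np.sin(a); the formulation kept here follows the
    # original source rather than that simpler form.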
    def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
        """Plot orbit response (2D).

        This function plots orbits for a given node on the rotor system in a
        2D view.

        Parameters
        ----------
        node : int
            Select the node to display the respective orbit response.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        ndof = self.number_dof
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        fig.add_trace(
            go.Scatter(
                x=np.mean(self.yout[..., ndof * node], axis=0),
                y=np.mean(self.yout[..., ndof * node + 1], axis=0),
                mode="lines",
                opacity=1.0,
                name="Mean",
                line=dict(width=3, color="black"),
                hovertemplate="X - Amplitude: %{x:.2e}<br>Y - Amplitude: %{y:.2e}",
            )
        )
        for i, p in enumerate(percentile):
            fig.add_trace(
                go.Scatter(
                    x=np.percentile(self.yout[..., ndof * node], p, axis=0),
                    y=np.percentile(self.yout[..., ndof * node + 1], p, axis=0),
                    mode="lines",
                    opacity=0.6,
                    line=dict(width=2.5, color=colors2[i]),
                    name="percentile: {}%".format(p),
                    hovertemplate="X - Amplitude: %{x:.2e}<br>Y - Amplitude: %{y:.2e}",
                )
            )
        for i, p in enumerate(conf_interval):
            p1 = np.percentile(self.yout[..., ndof * node], 50 + p / 2, axis=0)
            p2 = np.percentile(self.yout[..., ndof * node], 50 - p / 2, axis=0)
            p3 = np.percentile(self.yout[..., ndof * node + 1], 50 + p / 2, axis=0)
            p4 = np.percentile(self.yout[..., ndof * node + 1], 50 - p / 2, axis=0)
            fig.add_trace(
                go.Scatter(
                    x=np.concatenate((p1, p2[::-1])),
                    y=np.concatenate((p3, p4[::-1])),
                    mode="lines",
                    line=dict(width=1, color=colors1[i]),
                    fill="toself",
                    fillcolor=colors1[i],
                    opacity=0.5,
                    name="confidence interval: {}%".format(p),
                    hovertemplate="X - Amplitude: %{x:.2e}<br>Y - Amplitude: %{y:.2e}",
                )
            )

        fig.update_xaxes(title_text="<b>Amplitude</b>")
        fig.update_yaxes(title_text="<b>Amplitude</b>")
        fig.update_layout(title="<b>Rotor Orbit: node {}</b>".format(node), **kwargs)
        return fig
    def plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
        """Plot orbit response (3D).

        This function plots orbits for each node on the rotor system in a
        3D view.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout (e.g. width=800, height=600, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        ndof = self.number_dof
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        # rotor centerline
        line = np.zeros(len(self.nodes_pos))
        fig.add_trace(
            go.Scatter3d(
                x=self.nodes_pos,
                y=line,
                z=line,
                line=dict(width=2.0, color="black", dash="dashdot"),
                showlegend=False,
                mode="lines",
            )
        )

        hovertemplate = (
            "Nodal Position: %{x:.2f}<br>"
            "X - Amplitude: %{y:.2e}<br>"
            "Y - Amplitude: %{z:.2e}"
        )
        for j, n in enumerate(self.nodes_list):
            x = np.ones(self.yout.shape[1]) * self.nodes_pos[n]
            fig.add_trace(
                go.Scatter3d(
                    x=x,
                    y=np.mean(self.yout[..., ndof * n], axis=0),
                    z=np.mean(self.yout[..., ndof * n + 1], axis=0),
                    mode="lines",
                    line=dict(width=5, color="black"),
                    name="Mean",
                    legendgroup="mean",
                    showlegend=True if j == 0 else False,
                    hovertemplate=hovertemplate,
                )
            )
            for i, p in enumerate(percentile):
                fig.add_trace(
                    go.Scatter3d(
                        x=x,
                        y=np.percentile(self.yout[..., ndof * n], p, axis=0),
                        z=np.percentile(self.yout[..., ndof * n + 1], p, axis=0),
                        mode="lines",
                        opacity=1.0,
                        name="percentile: {}%".format(p),
                        line=dict(width=3, color=colors1[i]),
                        legendgroup="perc{}".format(p),
                        showlegend=True if j == 0 else False,
                        hovertemplate=hovertemplate,
                    )
                )
            for i, p in enumerate(conf_interval):
                # upper and lower bounds drawn as separate orbit lines
                for sign, show in ((+1, j == 0), (-1, False)):
                    fig.add_trace(
                        go.Scatter3d(
                            x=x,
                            y=np.percentile(
                                self.yout[..., ndof * n], 50 + sign * p / 2, axis=0
                            ),
                            z=np.percentile(
                                self.yout[..., ndof * n + 1], 50 + sign * p / 2, axis=0
                            ),
                            mode="lines",
                            line=dict(width=3.5, color=colors1[i]),
                            opacity=0.6,
                            name="confidence interval: {}%".format(p),
                            legendgroup="conf_interval{}".format(p),
                            showlegend=show,
                            hovertemplate=hovertemplate,
                        )
                    )

        fig.update_layout(
            scene=dict(
                xaxis=dict(title=dict(text="<b>Rotor Length</b>"), showspikes=False),
                yaxis=dict(title=dict(text="<b>Amplitude - X</b>"), showspikes=False),
                zaxis=dict(title=dict(text="<b>Amplitude - Y</b>"), showspikes=False),
            ),
            **kwargs,
        )
        return fig
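
# Usage sketch for ST_TimeResponseResults (illustrative only). yout is assumed
# to be (n_samples, len(time_range), n_dofs), which matches the slicing
# self.yout[..., ndof * node] and the axis=0 statistics used above.
def _example_time_response():  # hypothetical helper, not public API
    time_range = np.linspace(0, 1, 200)
    number_dof = 4
    nodes_list = np.array([0, 1, 2])
    nodes_pos = np.array([0.0, 0.5, 1.0])
    rng = np.random.default_rng(2)
    yout = rng.normal(0, 1e-5, size=(10, 200, number_dof * len(nodes_list)))
    xout = np.zeros_like(yout)
    results = ST_TimeResponseResults(
        time_range, yout, xout, number_dof, nodes_list, nodes_pos
    )
    return results.plot_2d(node=1, conf_interval=[90])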
\"\"\" fig0", "self.number_dof dofy = p[0] * self.number_dof + 1 angle =", "is bokeh Parameters ---------- speed_range : array Array with the", "opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase: %{y:.2f}\"),", "0 color_p = 0 for i, p in enumerate(probe): dofx", "%{y:.2e}\"), **kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace(", "x=self.speed_range, y=np.mean(self.wd[j], axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j + 1),", "Orbit: node {}</b>\".format(node)), return fig def plot_3d(self, percentile=[], conf_interval=[], fig=None,", "refers to +X direction. percentile : list, optional Sequence of", "line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), hovertemplate=( \"X", "ndof * node], 50 - p / 2, axis=0) p3", "percentile : list, optional Sequence of percentiles to compute, which", ") ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5,", "Plotly palette of colors colors1 = px.colors.qualitative.Dark24 colors2 = px.colors.qualitative.Light24", "passed to change the plot layout only (e.g. width=1000, height=800,", "x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\"", "(2D). This function plots orbits for a given node on", "units == \"mic-pk-pk\": r_axis_label = \"<b>Amplitude (μ pk-pk)</b>\" else: r_axis_label", "%{z:.2e}\" ), **kwargs, ) ) fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof", "axis=0), y=np.mean(self.yout[..., ndof * node + 1], axis=0), opacity=1.0, name=\"Mean\",", "mag[:, dofy])) probe_resp[i] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 +", "enumerate(conf_interval): p1 = np.percentile(self.phase, 50 + p / 2, axis=1)", "Mode {}\".format(j + 1), line=dict(width=3, color=colors1[j]), legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" +", "p2 = np.percentile(self.yout[..., ndof * node], 50 - p /", "%{customdata:.2f}</b>\" ), ) ) for j, p in enumerate(percentile): fig.add_trace(", "np.percentile(probe_phase, 50 + p / 2, axis=0) p4 = np.percentile(probe_phase,", "%{y:.2e}\", ) ) for j, p in enumerate(percentile): fig.add_trace( go.Scatter(", "Diagram. This method plots Campbell Diagram. Parameters ---------- percentile :", "{}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" +", "with the frequencies. 
magnitude : array Magnitude of the frequency", "linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Damped Natural Frequencies</b>\", title_font=dict(family=\"Arial\", size=20),", "np.concatenate((self.frequency_range, self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp,", "y=np.percentile(self.log_dec[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i),", "1} - percentile: {p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs,", "kwargs.setdefault(k, v) fig1 = self.plot_phase(percentile, conf_interval, **kwargs) fig2 = self.plot_polar_bode(percentile,", "customdata=self.speed_range, thetaunit=\"radians\", line=dict(width=3.0, color=\"black\"), name=\"Mean\", legendgroup=\"mean\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" +", "= go.Figure() color_p = 0 color_i = 0 for i,", "y=np.percentile(probe_phase, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i +", "**kwargs) default_values = dict(showlegend=False) for k, v in default_values.items(): kwargs.setdefault(k,", "50 + p / 2, axis=0) p2 = np.percentile(self.yout[..., ndof", "- Amplitude: %{y:.2e}\" ), **kwargs, ) ) for i, p", "+ 1} - confidence interval: {p}%\", legendgroup=f\"Probe {i + 1}", "np.percentile(self.log_dec[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x,", "response for each node for each frequency. frequency_range : array", "p2[::-1])), y=np.concatenate((p3, p4[::-1])), line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval:", "%{z:.2e}\" ), **kwargs, ) ) for i, p in enumerate(conf_interval):", "+ \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes(title_text=\"<b>Time (s)</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") return", "i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j], p, axis=1),", "angle). node : int indicate the node where the probe", "p in enumerate(percentile): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n],", "%{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs, ) ) for j in", "\"\"\"Store stochastic results and provide plots for Time Response and", "each frequency. number_dof = int Number of degrees of freedom", "Additional key word arguments can be passed to change the", "fig2 = self.plot_polar_bode(percentile, conf_interval, units, **kwargs) subplots = make_subplots( rows=2,", "probe, percentile=[], conf_interval=[], fig=None, **kwargs): \"\"\"Plot frequency response. 
    def plot_magnitude(
        self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs
    ):
        """Plot frequency response.

        This method plots the unbalance response magnitude given a set of
        probes with their nodes and orientations.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle).
            node : int -> indicates the node where the probe is located.
            orientation : float -> probe orientation angle, in radians,
            where 0 refers to the +X direction.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0% and 100% inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        units : str, optional
            Unit system. Default is "mic-pk-pk".
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        if units == "m":
            y_axis_label = "<b>Amplitude (m)</b>"
        elif units == "mic-pk-pk":
            y_axis_label = "<b>Amplitude (μ pk-pk)</b>"
        else:
            y_axis_label = "<b>Amplitude (dB)</b>"

        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        color_p = 0
        color_i = 0
        for i, p in enumerate(probe):
            probe_resp = self._probe_response(p)

            fig.add_trace(
                go.Scatter(
                    x=self.frequency_range,
                    y=np.mean(probe_resp, axis=0),
                    opacity=1.0,
                    mode="lines",
                    line=dict(width=3, color=list(tableau_colors)[i]),
                    name=f"Probe {i + 1} - Mean",
                    legendgroup=f"Probe {i + 1} - Mean",
                    hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                )
            )
            for pc in percentile:
                fig.add_trace(
                    go.Scatter(
                        x=self.frequency_range,
                        y=np.percentile(probe_resp, pc, axis=0),
                        opacity=0.6,
                        mode="lines",
                        line=dict(width=2.5, color=colors1[color_p]),
                        name=f"Probe {i + 1} - percentile: {pc}%",
                        legendgroup=f"Probe {i + 1} - percentile: {pc}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                    )
                )
                color_p += 1

            x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
            for ci in conf_interval:
                p1 = np.percentile(probe_resp, 50 + ci / 2, axis=0)
                p2 = np.percentile(probe_resp, 50 - ci / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors2[color_i]),
                        fill="toself",
                        fillcolor=colors2[color_i],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {ci}%",
                        legendgroup=f"Probe {i + 1} - confidence interval: {ci}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                    )
                )
                color_i += 1

        fig.update_xaxes(title_text="<b>Frequency</b>")
        fig.update_yaxes(title_text=y_axis_label)
        fig.update_layout(**kwargs)
        return fig
    def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs):
        """Plot frequency response.

        This method plots the phase response given a set of probes.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle).
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0% and 100% inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        color_p = 0
        color_i = 0
        for i, p in enumerate(probe):
            probe_phase = self._probe_phase(p)

            fig.add_trace(
                go.Scatter(
                    x=self.frequency_range,
                    y=np.mean(probe_phase, axis=0),
                    opacity=1.0,
                    mode="lines",
                    line=dict(width=3, color=list(tableau_colors)[i]),
                    name=f"Probe {i + 1} - Mean",
                    legendgroup=f"Probe {i + 1} - Mean",
                    hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                )
            )
            for pc in percentile:
                fig.add_trace(
                    go.Scatter(
                        x=self.frequency_range,
                        y=np.percentile(probe_phase, pc, axis=0),
                        opacity=0.6,
                        mode="lines",
                        line=dict(width=2.5, color=colors1[color_p]),
                        name=f"Probe {i + 1} - percentile: {pc}%",
                        legendgroup=f"Probe {i + 1} - percentile: {pc}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                    )
                )
                color_p += 1

            x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
            for ci in conf_interval:
                p1 = np.percentile(probe_phase, 50 + ci / 2, axis=0)
                p2 = np.percentile(probe_phase, 50 - ci / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors2[color_i]),
                        fill="toself",
                        fillcolor=colors2[color_i],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {ci}%",
                        legendgroup=f"Probe {i + 1} - confidence interval: {ci}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                    )
                )
                color_i += 1

        fig.update_xaxes(title_text="<b>Frequency</b>")
        fig.update_yaxes(title_text="<b>Phase Angle</b>")
        fig.update_layout(**kwargs)
        return fig
    def plot_polar_bode(
        self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs
    ):
        """Plot polar forced response using Plotly.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle).
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0% and 100% inclusive.
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        units : str, optional
            Magnitude unit system. Default is "mic-pk-pk".
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        if units == "m":
            r_axis_label = "<b>Amplitude (m)</b>"
        elif units == "mic-pk-pk":
            r_axis_label = "<b>Amplitude (μ pk-pk)</b>"
        else:
            r_axis_label = "<b>Amplitude (dB)</b>"

        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        polar_hover = (
            "<b>Amplitude: %{r:.2e}</b><br>"
            "<b>Phase: %{theta:.2f}</b><br>"
            "<b>Frequency: %{customdata:.2f}</b>"
        )
        color_p = 0
        color_i = 0
        for i, p in enumerate(probe):
            probe_resp = self._probe_response(p)
            probe_phase = self._probe_phase(p)

            fig.add_trace(
                go.Scatterpolar(
                    r=np.mean(probe_resp, axis=0),
                    theta=np.mean(probe_phase, axis=0),
                    customdata=self.frequency_range,
                    thetaunit="radians",
                    mode="lines",
                    line=dict(width=3.0, color=list(tableau_colors)[i]),
                    name=f"Probe {i + 1} - Mean",
                    legendgroup=f"Probe {i + 1} - Mean",
                    hovertemplate=polar_hover,
                )
            )
            for pc in percentile:
                fig.add_trace(
                    go.Scatterpolar(
                        r=np.percentile(probe_resp, pc, axis=0),
                        theta=np.percentile(probe_phase, pc, axis=0),
                        customdata=self.frequency_range,
                        thetaunit="radians",
                        opacity=0.6,
                        line=dict(width=2.5, color=colors1[color_p]),
                        name=f"Probe {i + 1} - percentile: {pc}%",
                        legendgroup=f"Probe {i + 1} - percentile: {pc}%",
                        hovertemplate=polar_hover,
                    )
                )
                color_p += 1
            for ci in conf_interval:
                p1 = np.percentile(probe_resp, 50 + ci / 2, axis=0)
                p2 = np.percentile(probe_resp, 50 - ci / 2, axis=0)
                p3 = np.percentile(probe_phase, 50 + ci / 2, axis=0)
                p4 = np.percentile(probe_phase, 50 - ci / 2, axis=0)
                fig.add_trace(
                    go.Scatterpolar(
                        r=np.concatenate((p1, p2[::-1])),
                        theta=np.concatenate((p3, p4[::-1])),
                        thetaunit="radians",
                        line=dict(width=1, color=colors2[color_i]),
                        fill="toself",
                        fillcolor=colors2[color_i],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {ci}%",
                        legendgroup=f"Probe {i + 1} - confidence interval: {ci}%",
                    )
                )
                color_i += 1

        fig.update_layout(
            polar=dict(
                radialaxis=dict(
                    title_text=r_axis_label,
                    title_font=dict(family="Arial", size=14),
                    gridcolor="lightgray",
                    exponentformat="power",
                ),
                angularaxis=dict(
                    tickfont=dict(size=14),
                    gridcolor="lightgray",
                    linecolor="black",
                    linewidth=2.5,
                ),
            ),
            **kwargs,
        )
        return fig
\"\"\" fig0 = self.plot_nat_freq(percentile, conf_interval,", "False, hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X - Amplitude: %{y:.2e}<br>\"", "v in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter(", "%{y:.2e}\" ), **kwargs, ) ) fig.update_xaxes(title_text=\"<b>Amplitude</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\") fig.update_layout(title=\"<b>Rotor Orbit: node", "%{z:.2e}\" ), **kwargs, ) ) fig.update_layout( scene=dict( xaxis=dict(title=dict(text=\"<b>Rotor Length</b>\"), showspikes=False),", "is \"mic-pk-pk\". kwargs : optional Additional key word arguments can", "return fig def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs): \"\"\"Plot", "class ST_FrequencyResponseResults: \"\"\"Store stochastic results and provide plots for Frequency", "\"Y - Amplitude: %{z:.2e}\" ), **kwargs, ) ) for i,", "phase angle vs frequency. \"\"\" def __init__(self, speed_range, magnitude, phase):", "= self.number_dof default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile =", "go.Figure() color_p = 0 color_i = 0 for i, p", "customdata=self.frequency_range, thetaunit=\"radians\", mode=\"lines\", line=dict(width=3.0, color=list(tableau_colors)[i]), name=f\"Probe {i + 1} -", "fig.update_yaxes( title_text=\"<b>Damped Natural Frequencies</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5,", "bordercolor=\"black\", borderwidth=2, ), ) return fig def plot_polar_bode( self, percentile=[],", "axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i + 1} -", "The figure object with the plot. \"\"\" default_values = dict(mode=\"lines\")", "color=list(tableau_colors)[i]), name=f\"Probe {i + 1} - Mean\", legendgroup=f\"Probe {i +", "np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp =", "\"Nodal Position: %{x:.2f}<br>\" + \"X - Amplitude: %{y:.2e}<br>\" + \"Y", "font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return subplots class", "a rotor model. nodes_pos: array Rotor nodes axial positions. number_dof", "word arguments can be passed to change the plot layout", "fig2[\"data\"]: subplots.add_trace(data, row=1, col=2) subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)", "visualize multiples harmonics in a single plot to check other", "The user chooses between them using the attribute plot_type. The", "axis=0) p4 = np.percentile(self.yout[..., ndof * node + 1], 50", "response magnitude given an output and an input using Plotly.", "{i + 1} - confidence interval: {p}%\", ) ) color_i", "default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if", "= go.Figure() x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j, h in", ") for i, p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(self.magnitude, p,", "axes kwargs : optional Additional key word arguments can be", "p[1] probe_phase[i] = probe_phase[i] - angle fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_phase,", "self.yout[..., ndof * n + 1], 50 - p /", "polar_kwargs : optional Additional key word arguments can be passed", "amplitude vs frequency phase angle vs frequency. \"\"\" fig0 =", "This method plots the unbalance response magnitude. 
    def plot(self, percentile=[], conf_interval=[], *args, **kwargs):
        """Plot Campbell Diagram.

        This method plots the damped natural frequencies and the logarithmic
        decrement side by side.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        args : optional
            Additional plot axes.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        subplots : Plotly graph_objects.make_subplots()
            Plotly figure with frequency and log dec plots.
        """
        fig0 = self.plot_nat_freq(percentile, conf_interval, *args, **kwargs)

        default_values = dict(showlegend=False)
        for k, v in default_values.items():
            kwargs.setdefault(k, v)

        fig1 = self.plot_log_dec(percentile, conf_interval, *args, **kwargs)

        subplots = make_subplots(rows=1, cols=2)
        for data in fig0["data"]:
            subplots.add_trace(data, 1, 1)
        for data in fig1["data"]:
            subplots.add_trace(data, 1, 2)

        subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
        subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)
        subplots.update_xaxes(fig1.layout.xaxis, row=1, col=2)
        subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2)
        subplots.update_layout(
            plot_bgcolor="white", width=1800, height=900, legend=_legend_style
        )
        return subplots
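    # A minimal usage sketch (hedged; the arrays below are made-up stand-ins
    # for the Monte Carlo output that ST_Rotor analyses normally provide):
    #
    # >>> import numpy as np
    # >>> speeds = np.linspace(0, 500, 51)
    # >>> wd = 100 + np.random.rand(2, 51, 30)       # 2 modes, 30 samples
    # >>> log_dec = np.random.rand(2, 51, 30)
    # >>> campbell = ST_CampbellResults(speeds, wd, log_dec)
    # >>> fig = campbell.plot(percentile=[50], conf_interval=[90])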

class ST_FrequencyResponseResults:
    """Store stochastic results and provide plots for Frequency Response.

    Parameters
    ----------
    speed_range : array
        Array with the speed range in rad/s.
    magnitude : array
        Array with the frequencies, magnitude (dB) of the frequency response
        for each pair input/output.
    phase : array
        Array with the frequencies, phase of the frequency response for each
        pair input/output.

    Returns
    -------
    subplots : Plotly graph_objects.make_subplots()
        Plotly figure with amplitude vs frequency and phase angle vs
        frequency plots.
    """

    def __init__(self, speed_range, magnitude, phase):
        self.speed_range = speed_range
        self.magnitude = magnitude
        self.phase = phase
\"\"\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if fig", "hoverinfo=\"none\", **kwargs, ) ) for i, p in enumerate(conf_interval): p1", "and an input using Plotly. Parameters ---------- percentile : list,", "height=900, polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\",", "in enumerate(percentile): fig.add_trace( go.Scatter( x=self.time_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, line=dict(width=2.5),", "results and provide plots for Time Response and Orbit Response.", "harmonics=[1], **kwargs): \"\"\"Plot the log_dec vs frequency. Parameters ---------- percentile", "frequency response. This method plots the unbalance response magnitude. Parameters", "tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_layout( width=1200, height=900,", "probe_phase[i] = np.array( [i + 2 * np.pi if i", "with their nodes and orientations. Parameters ---------- probe : list", "fig class ST_ForcedResponseResults: \"\"\"Store stochastic results and provide plots for", "phase : array Phase of the frequency response for node", ": array System response. xout : array Time evolution of", "else False, hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X - Amplitude:", "enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, mode=\"lines\", line=dict(width=2.5,", "/ 2, axis=0) p3 = np.percentile(self.yout[..., ndof * node +", "phase): self.speed_range = speed_range self.magnitude = magnitude self.phase = phase", "**kwargs, ): \"\"\"Plot polar forced response using Plotly. Parameters ----------", "with the plot. args : optional Additional plot axes kwargs", "given a set of probes. Parameters ---------- probe : list", "\"\"\"Plot the damped natural frequencies vs frequency. Parameters ---------- percentile", "{p}%\", legendgroup=f\"Probe {i + 1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase:", "row=1, col=1) for data in fig1[\"data\"]: data.showlegend = False fig.add_trace(data,", "y_axis_label = \"<b>Amplitude (μ pk-pk)</b>\" else: y_axis_label = \"<b>Amplitude (dB)</b>\"", "enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.log_dec[j], p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]),", "np.sin(angle) ** 2 ) # fmt: on fig.add_trace( go.Scatter( x=self.time_range,", "fig.update_yaxes(fig1.layout.yaxis, row=2, col=1) fig.update_layout( polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ), ) return", "* node + 1], p, axis=0), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile:", ") ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range,", ") probe_resp = np.zeros_like(self.magnitude[:, :, 0]) for j, mag in", "**kwargs, ) ) fig.update_layout( scene=dict( xaxis=dict(title=dict(text=\"<b>Rotor Length</b>\"), showspikes=False), yaxis=dict(title=dict(text=\"<b>Amplitude -", "an output and an input using bokeh. 
    def plot_phase(self, percentile=[], conf_interval=[], **kwargs):
        """Plot phase angle response.

        This method plots the phase response given an output and an input
        using Plotly.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the phase plot.
        """
        default_values = dict(mode="lines")
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)
        for k, v in default_values.items():
            kwargs.setdefault(k, v)

        fig = go.Figure()
        fig.add_trace(
            go.Scatter(
                x=self.speed_range,
                y=np.mean(self.phase, axis=1),
                opacity=1.0,
                name="Mean",
                line=dict(width=3, color="black"),
                legendgroup="mean",
                hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
                **kwargs,
            )
        )
        for i, p in enumerate(percentile):
            fig.add_trace(
                go.Scatter(
                    x=self.speed_range,
                    y=np.percentile(self.phase, p, axis=1),
                    opacity=0.6,
                    line=dict(width=2.5, color=colors2[i]),
                    name="percentile: {}%".format(p),
                    legendgroup="percentile{}".format(i),
                    hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
                    **kwargs,
                )
            )
        x = np.concatenate((self.speed_range, self.speed_range[::-1]))
        for i, p in enumerate(conf_interval):
            p1 = np.percentile(self.phase, 50 + p / 2, axis=1)
            p2 = np.percentile(self.phase, 50 - p / 2, axis=1)
            fig.add_trace(
                go.Scatter(
                    x=x,
                    y=np.concatenate((p1, p2[::-1])),
                    line=dict(width=1, color=colors1[i]),
                    fill="toself",
                    fillcolor=colors1[i],
                    opacity=0.5,
                    name="confidence interval: {}%".format(p),
                    legendgroup="conf{}".format(i),
                    hovertemplate=("Frequency: %{x:.2f}<br>" + "Phase: %{y:.2f}"),
                    **kwargs,
                )
            )

        fig.update_xaxes(title_text="<b>Frequency</b>", **_axis_style)
        fig.update_yaxes(title_text="<b>Phase Angle</b>", **_axis_style)
        fig.update_layout(
            width=1200, height=900, plot_bgcolor="white", legend=_legend_style
        )
        return fig
\"\"\" if units == \"m\": y_axis_label = \"<b>Amplitude", "fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase:", "p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors2[color_i]), fill=\"toself\", fillcolor=colors2[color_i], opacity=0.5, name=f\"Probe {i +", "units=units, **kwargs) fig1 = self.plot_phase(probe, percentile, conf_interval, **kwargs) fig2 =", "mirror=True, ) fig.update_layout( width=1200, height=900, plot_bgcolor=\"white\", legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\",", "hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ),", "line=dict(width=3, color=colors1[i]), legendgroup=\"perc{}\".format(p), showlegend=True if j == 0 else False,", "Reference for more information. Returns ------- subplots : Plotly graph_objects.make_subplots()", "frequency response for node for each frequency. number_dof = int", "go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency:", "Array with the speed range in rad/s. magnitude : array", "for data in fig0[\"data\"]: subplots.add_trace(data, row=1, col=1) for data in", "- Mean\", line=dict(width=3.0), hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "enumerate(conf_interval): p1 = np.percentile(self.wd[j], 50 + p / 2, axis=1)", "+ \"<b>Frequency: %{customdata:.2f}</b>\" ), ) ) color_p += 1 for", "optional Additional key word arguments can be passed to change", "self.forced_resp = forced_resp self.magnitude = magnitude self.phase = phase self.frequency_range", "fig2 = self.plot_polar_bode(probe, percentile, conf_interval, units=units, **kwargs) if fig is", "width=1800, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), )", "\"m\": y_axis_label = \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": y_axis_label", "in fig1[\"data\"]: subplots.add_trace(data, row=2, col=1) for data in fig2[\"data\"]: subplots.add_trace(data,", ") for i, p in enumerate(conf_interval): p1 = np.percentile(self.wd[j], 50", "= np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) if", ") probe_resp = np.zeros_like(self.yout[:, :, 0]) for j, y in", "in fig0[\"data\"]: subplots.add_trace(data, 1, 1) for data in fig1[\"data\"]: subplots.add_trace(data,", "**kwargs, ) ) for i, p in enumerate(conf_interval): p1 =", "mode=\"lines\", line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i + 1} - percentile: {p}%\",", "axis=1), customdata=self.speed_range, thetaunit=\"radians\", line=dict(width=3.0, color=\"black\"), name=\"Mean\", legendgroup=\"mean\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\"", "frequency response for each pair input/output. phase : array Array", "ndof * node + 1], p, axis=0), opacity=0.6, line=dict(width=2.5, color=colors2[i]),", "- np.sin(angle)], [np.cos(angle), + np.sin(angle)]] ) probe_resp = np.zeros_like(self.magnitude[:, :,", "plots for Frequency Response. 
    def plot(self, percentile=[], conf_interval=[], units="mic-pk-pk", **kwargs):
        """Plot frequency response.

        This method plots the frequency and phase response given an output
        and an input, together with the polar Bode plot.

        Parameters
        ----------
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        units : str, optional
            Unit system. Default is "mic-pk-pk".
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        subplots : Plotly graph_objects.make_subplots()
            Plotly figure with amplitude vs frequency, phase angle vs
            frequency and polar Bode plots.
        """
        fig0 = self.plot_magnitude(percentile, conf_interval, units, **kwargs)

        default_values = dict(showlegend=False)
        for k, v in default_values.items():
            kwargs.setdefault(k, v)

        fig1 = self.plot_phase(percentile, conf_interval, **kwargs)
        fig2 = self.plot_polar_bode(percentile, conf_interval, units, **kwargs)

        subplots = make_subplots(
            rows=2, cols=2, specs=[[{}, {"type": "polar", "rowspan": 2}], [{}, None]]
        )
        for data in fig0["data"]:
            subplots.add_trace(data, row=1, col=1)
        for data in fig1["data"]:
            data.showlegend = False
            subplots.add_trace(data, row=2, col=1)
        for data in fig2["data"]:
            data.showlegend = False
            subplots.add_trace(data, row=1, col=2)

        subplots.update_xaxes(fig0.layout.xaxis, row=1, col=1)
        subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1)
        subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1)
        subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1)
        subplots.update_layout(
            plot_bgcolor="white",
            polar_bgcolor="white",
            width=1800,
            height=900,
            polar=dict(
                radialaxis=fig2.layout.polar.radialaxis,
                angularaxis=fig2.layout.polar.angularaxis,
            ),
            legend=_legend_style,
        )
        return subplots
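    # A minimal usage sketch (hedged; the magnitude/phase arrays are made-up
    # stand-ins shaped (n_frequencies, n_samples)):
    #
    # >>> import numpy as np
    # >>> speeds = np.linspace(0, 1000, 101)
    # >>> mag = np.abs(np.random.randn(101, 50))
    # >>> phs = np.random.uniform(-np.pi, np.pi, (101, 50))
    # >>> freq_resp = ST_FrequencyResponseResults(speeds, mag, phs)
    # >>> fig = freq_resp.plot(percentile=[50], conf_interval=[95])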

class ST_ForcedResponseResults:
    """Store stochastic results and provide plots for Forced Response.

    Parameters
    ----------
    forced_resp : array
        Array with the force response for each node for each frequency.
    magnitude : array
        Magnitude of the frequency response for each node for each frequency.
    phase : array
        Phase of the frequency response for each node for each frequency.
    frequency_range : array
        Array with the frequencies.
    number_dof : int
        Number of degrees of freedom per shaft element's node.

    Returns
    -------
    subplots : Plotly graph_objects.make_subplots()
        Plotly figure with amplitude vs frequency, phase angle vs frequency
        and polar Bode plots.
    """

    def __init__(self, forced_resp, magnitude, phase, frequency_range, number_dof):
        self.forced_resp = forced_resp
        self.magnitude = magnitude
        self.phase = phase
        self.frequency_range = frequency_range
        self.number_dof = number_dof
    def plot_magnitude(self, probe, percentile=[], conf_interval=[], fig=None, units="mic-pk-pk", **kwargs):
        """Plot unbalance response magnitude.

        This method plots the unbalance response magnitude given a set of
        probes.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle), where node is the
            node where the probe is located and orientation is the probe
            orientation angle about the shaft. The 0 refers to +X direction.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure(), optional
            The figure object with the plot.
        units : str, optional
            Unit system. Default is "mic-pk-pk".
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        if units == "m":
            y_axis_label = "<b>Amplitude (m)</b>"
        elif units == "mic-pk-pk":
            y_axis_label = "<b>Amplitude (μ pk-pk)</b>"
        else:
            y_axis_label = "<b>Amplitude (dB)</b>"

        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        color_p = 0
        color_i = 0
        x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
        for i, p in enumerate(probe):
            dofx = p[0] * self.number_dof
            dofy = p[0] * self.number_dof + 1
            angle = p[1]

            # Project the (x, y) response of the node onto the probe direction.
            operator = np.array(
                [[np.cos(angle), -np.sin(angle)], [np.cos(angle), +np.sin(angle)]]
            )
            probe_resp = np.zeros_like(self.magnitude[:, :, 0])
            for j, mag in enumerate(self.magnitude):
                _probe_resp = operator @ np.vstack((mag[:, dofx], mag[:, dofy]))
                probe_resp[j] = np.sqrt(
                    (_probe_resp[0] * np.cos(angle)) ** 2
                    + (_probe_resp[1] * np.sin(angle)) ** 2
                )

            fig.add_trace(
                go.Scatter(
                    x=self.frequency_range,
                    y=np.mean(probe_resp, axis=0),
                    opacity=1.0,
                    mode="lines",
                    line=dict(width=3.0, color=list(tableau_colors)[i]),
                    name=f"Probe {i + 1} - Mean",
                    legendgroup=f"Probe {i + 1} - Mean",
                    hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                )
            )
            for j, p_perc in enumerate(percentile):
                fig.add_trace(
                    go.Scatter(
                        x=self.frequency_range,
                        y=np.percentile(probe_resp, p_perc, axis=0),
                        opacity=0.6,
                        mode="lines",
                        line=dict(width=2.5, color=colors1[color_p]),
                        name=f"Probe {i + 1} - percentile: {p_perc}%",
                        legendgroup=f"Probe {i + 1} - percentile: {p_perc}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                    )
                )
                color_p += 1
            for j, p_conf in enumerate(conf_interval):
                p1 = np.percentile(probe_resp, 50 + p_conf / 2, axis=0)
                p2 = np.percentile(probe_resp, 50 - p_conf / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors2[color_i]),
                        fill="toself",
                        fillcolor=colors2[color_i],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {p_conf}%",
                        legendgroup=f"Probe {i + 1} - confidence interval: {p_conf}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}",
                    )
                )
                color_i += 1

        fig.update_xaxes(title_text="<b>Frequency</b>", **_axis_style)
        fig.update_yaxes(title_text=y_axis_label, **_axis_style)
        fig.update_layout(**kwargs)
        return fig
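    # Sketch of the probe projection above (hedged, illustrative numbers):
    # a response purely along X measured by a probe at 45° comes out as
    # roughly 0.707 of its amplitude.
    #
    # >>> import numpy as np
    # >>> angle = np.pi / 4
    # >>> operator = np.array(
    # ...     [[np.cos(angle), -np.sin(angle)], [np.cos(angle), +np.sin(angle)]]
    # ... )
    # >>> xy = np.array([[1.0], [0.0]])          # unit X response, zero Y
    # >>> resp = operator @ xy
    # >>> float(np.sqrt((resp[0] * np.cos(angle)) ** 2
    # ...               + (resp[1] * np.sin(angle)) ** 2))
    # 0.7071067811865476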
    def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None, **kwargs):
        """Plot unbalance response phase.

        This method plots the phase response given a set of probes.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle), where node is the
            node where the probe is located and orientation is the probe
            orientation angle about the shaft. The 0 refers to +X direction.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure(), optional
            The figure object with the plot.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            layout only (e.g. width=1000, height=800, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)

        if fig is None:
            fig = go.Figure()

        color_p = 0
        color_i = 0
        x = np.concatenate((self.frequency_range, self.frequency_range[::-1]))
        for i, p in enumerate(probe):
            angle = p[1]
            probe_phase = np.zeros_like(self.phase[:, :, 0])
            for j, phs in enumerate(self.phase):
                aux_phase = phs[:, p[0] * self.number_dof]
                # Wrap phases to [0, 2*pi) before referencing them to the probe.
                probe_phase[j] = np.array(
                    [ang + 2 * np.pi if ang < 0 else ang for ang in aux_phase]
                )
            probe_phase = probe_phase - angle

            fig.add_trace(
                go.Scatter(
                    x=self.frequency_range,
                    y=np.mean(probe_phase, axis=0),
                    opacity=1.0,
                    mode="lines",
                    line=dict(width=3.0, color=list(tableau_colors)[i]),
                    name=f"Probe {i + 1} - Mean",
                    legendgroup=f"Probe {i + 1} - Mean",
                    hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                )
            )
            for j, p_perc in enumerate(percentile):
                fig.add_trace(
                    go.Scatter(
                        x=self.frequency_range,
                        y=np.percentile(probe_phase, p_perc, axis=0),
                        opacity=0.6,
                        mode="lines",
                        line=dict(width=2.5, color=colors1[color_p]),
                        name=f"Probe {i + 1} - percentile: {p_perc}%",
                        legendgroup=f"Probe {i + 1} - percentile{p_perc}",
                        hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                    )
                )
                color_p += 1
            for j, p_conf in enumerate(conf_interval):
                p1 = np.percentile(probe_phase, 50 + p_conf / 2, axis=0)
                p2 = np.percentile(probe_phase, 50 - p_conf / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        mode="lines",
                        line=dict(width=1, color=colors2[color_i]),
                        fill="toself",
                        fillcolor=colors2[color_i],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {p_conf}%",
                        legendgroup=f"Probe {i + 1} - confidence interval: {p_conf}%",
                        hovertemplate="Frequency: %{x:.2f}<br>Phase: %{y:.2f}",
                    )
                )
                color_i += 1

        fig.update_xaxes(title_text="<b>Frequency</b>", **_axis_style)
        fig.update_yaxes(title_text="<b>Phase Angle</b>", **_axis_style)
        fig.update_layout(**kwargs)
        return fig
\"\"\" def __init__(self, speed_range, wd, log_dec): self.speed_range", "size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Phase", "dofx], mag[:, dofy])) probe_resp[i] = np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2", "graph_objects.Figure() The figure object with the plot. args : optional", "with the plot. units : str, optional Unit system Default", "frequencies log_dec : array Array with the Logarithmic decrement Returns", "size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return fig def plot(self,", "plots orbits for a given node on the rotor system", "data in fig0[\"data\"]: subplots.add_trace(data, 1, 1) for data in fig1[\"data\"]:", "nodes_list, nodes_pos): self.time_range = time_range self.yout = yout self.xout =", "response (3D). This function plots orbits for each node on", "vs frequency phase angle vs frequency. \"\"\" def __init__(self, speed_range,", "for data in fig0[\"data\"]: data.showlegend = False fig.add_trace(data, row=1, col=1)", "response given an output and an input. Parameters ---------- percentile", "damped natural frequencies log_dec : array Array with the Logarithmic", "False fig.add_trace(data, row=2, col=1) for data in fig2[\"data\"]: fig.add_trace(data, row=1,", "%{y:.2e}\" ), **kwargs, ) ) for i, p in enumerate(percentile):", "for i, p in enumerate(probe): dofx = p[0] * self.number_dof", "y_axis_label = \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": y_axis_label =", "1], p, axis=0), opacity=1.0, name=\"percentile: {}%\".format(p), line=dict(width=3, color=colors1[i]), legendgroup=\"perc{}\".format(p), showlegend=True", "optional Unit system Default is \"mic-pk-pk\" kwargs : optional Additional", "unbalance response magnitude. Parameters ---------- probe : list of tuples", "the plot layout only (e.g. width=1000, height=800, ...). *See Plotly", "\"rowspan\": 2}], [{}, None]] ) for data in fig0[\"data\"]: subplots.add_trace(data,", "is None: fig = go.Figure() fig.add_trace( go.Scatter( x=np.mean(self.yout[..., ndof *", "(dB)</b>\" default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile)", "(dB)</b>\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if fig is", "conf_interval, **kwargs) fig2 = self.plot_polar_bode(probe, percentile, conf_interval, units=units, **kwargs) if", "col=1) fig.update_yaxes(fig0.layout.yaxis, row=1, col=1) fig.update_xaxes(fig1.layout.xaxis, row=2, col=1) fig.update_yaxes(fig1.layout.yaxis, row=2, col=1)", "the shaft. The 0 refers to +X direction. percentile :", "go.Scatter( x=self.speed_range, y=np.mean(self.wd[j], axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j +", ": list, optional Sequence of percentiles to compute, which must", "also excite a specific natural frequency. Two options for plooting", "opacity=0.6, line=dict(width=2.5, color=colors1[color_p]), name=f\"Probe {i + 1} - percentile: {p}%\",", ": array Array with the force response for each node", "h, opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\"", "figure with amplitude vs frequency phase angle vs frequency. 
\"\"\"", "p2 = np.percentile(self.magnitude, 50 - p / 2, axis=1) fig.add_trace(", "color=\"black\"), name=\"Mean\", legendgroup=\"mean\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" +", "return fig def plot_polar_bode( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ):", "the node to display the respective orbit response. percentile :", "axis=0) p2 = np.percentile(self.yout[..., ndof * node], 50 - p", "vs frequency phase angle vs frequency. \"\"\" # fmt: off", "fig is None: fig = go.Figure() line = np.zeros(len(self.nodes_pos)) fig.add_trace(", "): \"\"\"Plot polar forced response using Plotly. Parameters ---------- probe", "Reference for more information. Returns ------- fig : Plotly graph_objects.Figure()", "with the plot. \"\"\" ndof = self.number_dof default_values = dict(mode=\"lines\")", "\"\"\"Plot time response. This method plots the time response given", "single plot to check other speeds which also excite a", "p / 2, axis=1) p2 = np.percentile(self.wd[j], 50 - p", "hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y - Amplitude: %{y:.2e}\"", "for Campbell Diagram. It's possible to visualize multiples harmonics in", "array. yout : array System response. xout : array Time", "= p[1] probe_phase[i] = probe_phase[i] - angle fig.add_trace( go.Scatter( x=self.frequency_range,", "of analyses in st_rotor_assembly.py. \"\"\" import numpy as np from", "Plotly graph_objects.Figure() The figure object with the plot. \"\"\" ndof", "bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return fig def plot_phase(self, percentile=[],", "speed_range, wd, log_dec): self.speed_range = speed_range self.wd = wd self.log_dec", "{}%\".format(p), legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Phase: %{y:.2f}\"), **kwargs, ) )", "v in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_log_dec(percentile, conf_interval, *args,", "number_dof, nodes_list, nodes_pos): self.time_range = time_range self.yout = yout self.xout", "): \"\"\"Plot time response. This method plots the time response", "np.percentile(self.yout[..., ndof * node + 1], 50 + p /", "fig : Plotly graph_objects.Figure() Bokeh plot axes with magnitude plot.", "hoverinfo=\"none\", **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Rotor speed</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16),", "%{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), ) ) for j, p", "a 3D view. Parameters ---------- percentile : list, optional Sequence", "wd self.log_dec = log_dec def plot_nat_freq(self, percentile=[], conf_interval=[], harmonics=[1], **kwargs):", "ST_TimeResponseResults: \"\"\"Store stochastic results and provide plots for Time Response", "z=line, line=dict(width=2.0, color=\"black\", dash=\"dashdot\"), showlegend=False, mode=\"lines\", ) ) for j,", "node + 1], 50 - p / 2, axis=0) fig.add_trace(", ") for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.phase,", ") for j, p in enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(probe_resp, p,", ": array Time evolution of the state vector. 

class ST_TimeResponseResults:
    """Store stochastic results and provide plots for Time Response and
    Orbit Response.

    Parameters
    ----------
    time_range : 1-dimensional array
        Time array.
    yout : array
        System response.
    xout : array
        Time evolution of the state vector.
    number_dof : int
        Number of degrees of freedom per shaft element's node.
    nodes_list : array
        List with nodes from a rotor model.
    nodes_pos : array
        Rotor nodes axial positions.

    Returns
    -------
    fig : Plotly graph_objects.Figure()
        The figure object with the plot.
    """

    def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos):
        self.time_range = time_range
        self.yout = yout
        self.xout = xout
        self.number_dof = number_dof
        self.nodes_list = nodes_list
        self.nodes_pos = nodes_pos
    def plot_1d(self, probe, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
        """Plot time response.

        This method plots the time response given a set of probes with their
        nodes and orientations.

        Parameters
        ----------
        probe : list of tuples
            List with tuples (node, orientation angle), where node is the
            node where the probe is located and orientation is the probe
            orientation angle about the shaft. The 0 refers to +X direction.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure(), optional
            The figure object with the plot.
        args : optional
            Additional plot axes.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        default_values = dict(mode="lines")
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)
        for k, v in default_values.items():
            kwargs.setdefault(k, v)

        if fig is None:
            fig = go.Figure()

        x = np.concatenate((self.time_range, self.time_range[::-1]))
        for i, p in enumerate(probe):
            dofx = p[0] * self.number_dof
            dofy = p[0] * self.number_dof + 1
            angle = p[1]
            operator = np.array(
                [[np.cos(angle), -np.sin(angle)], [np.cos(angle), +np.sin(angle)]]
            )

            probe_resp = np.zeros_like(self.yout[:, :, 0])
            for j, y in enumerate(self.yout):
                _probe_resp = operator @ np.vstack((y[:, dofx], y[:, dofy]))
                probe_resp[j] = np.sqrt(
                    (_probe_resp[0] * np.cos(angle)) ** 2
                    + (_probe_resp[1] * np.sin(angle)) ** 2
                )

            fig.add_trace(
                go.Scatter(
                    x=self.time_range,
                    y=np.mean(probe_resp, axis=0),
                    opacity=1.0,
                    name=f"Probe {i + 1} - Mean",
                    line=dict(width=3.0),
                    hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
                    **kwargs,
                )
            )
            for j, p_perc in enumerate(percentile):
                fig.add_trace(
                    go.Scatter(
                        x=self.time_range,
                        y=np.percentile(probe_resp, p_perc, axis=0),
                        opacity=0.6,
                        line=dict(width=2.5),
                        name=f"Probe {i + 1} - percentile: {p_perc}%",
                        hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
                        **kwargs,
                    )
                )
            for j, p_conf in enumerate(conf_interval):
                p1 = np.percentile(probe_resp, 50 + p_conf / 2, axis=0)
                p2 = np.percentile(probe_resp, 50 - p_conf / 2, axis=0)
                fig.add_trace(
                    go.Scatter(
                        x=x,
                        y=np.concatenate((p1, p2[::-1])),
                        line=dict(width=1),
                        fill="toself",
                        fillcolor=colors1[j],
                        opacity=0.5,
                        name=f"Probe {i + 1} - confidence interval: {p_conf}%",
                        hovertemplate=("Time: %{x:.3f}<br>" + "Amplitude: %{y:.2e}"),
                        **kwargs,
                    )
                )

        fig.update_xaxes(title_text="<b>Time (s)</b>")
        fig.update_yaxes(title_text="<b>Amplitude</b>")
        return fig
    def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs):
        """Plot orbit response (2D).

        This function plots orbits for a given node on the rotor system in a
        2D view.

        Parameters
        ----------
        node : int
            Select the node to display the respective orbit response.
        percentile : list, optional
            Sequence of percentiles to compute, which must be between
            0 and 100 inclusive.
        conf_interval : list, optional
            Sequence of confidence intervals to compute, which must be
            between 0 and 100 inclusive.
        fig : Plotly graph_objects.Figure(), optional
            The figure object with the plot.
        args : optional
            Additional plot axes.
        kwargs : optional
            Additional key word arguments can be passed to change the plot
            (e.g. line=dict(width=4.0, color="royalblue"), opacity=1.0, ...).
            *See Plotly Python Figure Reference for more information.

        Returns
        -------
        fig : Plotly graph_objects.Figure()
            The figure object with the plot.
        """
        ndof = self.number_dof
        default_values = dict(mode="lines")
        conf_interval = np.sort(conf_interval)
        percentile = np.sort(percentile)
        for k, v in default_values.items():
            kwargs.setdefault(k, v)

        if fig is None:
            fig = go.Figure()

        fig.add_trace(
            go.Scatter(
                x=np.mean(self.yout[..., ndof * node], axis=0),
                y=np.mean(self.yout[..., ndof * node + 1], axis=0),
                opacity=1.0,
                name="Mean",
                line=dict(width=3, color="black"),
                hovertemplate=(
                    "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
                ),
                **kwargs,
            )
        )
        for i, p in enumerate(percentile):
            fig.add_trace(
                go.Scatter(
                    x=np.percentile(self.yout[..., ndof * node], p, axis=0),
                    y=np.percentile(self.yout[..., ndof * node + 1], p, axis=0),
                    opacity=0.6,
                    line=dict(width=2.5, color=colors2[i]),
                    name="percentile: {}%".format(p),
                    hovertemplate=(
                        "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
                    ),
                    **kwargs,
                )
            )
        for i, p in enumerate(conf_interval):
            p1 = np.percentile(self.yout[..., ndof * node], 50 + p / 2, axis=0)
            p2 = np.percentile(self.yout[..., ndof * node], 50 - p / 2, axis=0)
            p3 = np.percentile(self.yout[..., ndof * node + 1], 50 + p / 2, axis=0)
            p4 = np.percentile(self.yout[..., ndof * node + 1], 50 - p / 2, axis=0)
            fig.add_trace(
                go.Scatter(
                    x=np.concatenate((p1, p2[::-1])),
                    y=np.concatenate((p3, p4[::-1])),
                    line=dict(width=1, color=colors1[i]),
                    fill="toself",
                    fillcolor=colors1[i],
                    opacity=0.5,
                    name="confidence interval: {}%".format(p),
                    hovertemplate=(
                        "X - Amplitude: %{x:.2e}<br>" + "Y - Amplitude: %{y:.2e}"
                    ),
                    **kwargs,
                )
            )

        fig.update_xaxes(title_text="<b>Amplitude</b>")
        fig.update_yaxes(title_text="<b>Amplitude</b>")
        fig.update_layout(title="<b>Rotor Orbit: node {}</b>".format(node))
        return fig
units : str
    Magnitude unit
This method plots", "/ 2, axis=1) p2 = np.percentile(self.log_dec[j], 50 - p /", "np.concatenate((self.speed_range, self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j],", "= np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) for", "decrement Returns ------- subplots : Plotly graph_objects.make_subplots() Plotly figure with", "z=np.percentile(self.yout[..., ndof * n + 1], p, axis=0), opacity=1.0, name=\"percentile:", "node to display the respective orbit response. percentile : list,", "opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\"", "def plot(self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs): \"\"\"Plot frequency response. This", "the node where the probe is located. orientation : float,", "def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos): self.time_range =", "{i + 1} - confidence interval: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\",", "+ p / 2, axis=1) p2 = np.percentile(self.phase, 50 -", "percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot polar forced response using", "the frequency response for each pair input/output. Returns ------- subplots", "np.percentile(self.wd[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter( x=x,", "orbit response (3D). This function plots orbits for each node", "node + 1], 50 + p / 2, axis=0) p4", "x=x, y=np.percentile(self.yout[..., ndof * n], p, axis=0), z=np.percentile(self.yout[..., ndof *", "probe : list of tuples List with tuples (node, orientation", "= np.percentile(self.phase, 50 + p / 2, axis=1) p2 =", "name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency:", "), ) return subplots class ST_TimeResponseResults: \"\"\"Store stochastic results and", "fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), hovertemplate=( \"X - Amplitude:", "harmonics=[1], **kwargs): \"\"\"Plot the damped natural frequencies vs frequency. Parameters", "list, optional Sequence of percentiles to compute, which must be", "confidence interval: {p}%\", ) ) color_i += 1 fig.update_layout( polar=dict(", "2, axis=0) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1,", "Default is \"mic-pk-pk\" kwargs : optional Additional key word arguments", "the frequencies. magnitude : array Magnitude of the frequency response", "Plotly Python Figure Reference for more information. Returns ------- subplots", "), **kwargs, ) ) for i, p in enumerate(conf_interval): p1", "\"Phase: %{y:.2f}\"), **kwargs, ) ) x = np.concatenate((self.speed_range, self.speed_range[::-1])) for", "), line=dict(width=3.5, color=colors1[i]), opacity=0.6, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf_interval{}\".format(p), showlegend=True if", "magnitude. Parameters ---------- probe : list of tuples List with", ") ) color_i += 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=\"<b>Phase Angle</b>\") fig.update_layout(**kwargs), return", "\"<b>Frequency: %{customdata:.2f}</b>\" ), **kwargs, ) ) for i, p in", "plot layout only (e.g. width=1000, height=800, ...). 
*See Plotly Python", "j, mag in enumerate(self.magnitude): _probe_resp = operator @ np.vstack((mag[:, dofx],", "fmt: off fig0 = self.plot_magnitude(probe, percentile, conf_interval, units=units, **kwargs) fig1", "for each pair input/output. phase : array Array with the", "p / 2, axis=1) fig.add_trace( go.Scatterpolar( r=np.concatenate((p1, p2[::-1])), theta=np.concatenate((p3, p4[::-1])),", "name=\"confidence interval: {}% - Mode {}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j,", "2, axis=0) fig.add_trace( go.Scatter( x=np.concatenate((p1, p2[::-1])), y=np.concatenate((p3, p4[::-1])), line=dict(width=1, color=colors1[i]),", "Plotly. Parameters ---------- dof : int Degree of freedom. units", "confidence intervals to compute, which must be between 0 and", "for j, h in enumerate(harmonics): fig.add_trace( go.Scatter( x=self.speed_range, y=self.speed_range *", "p, axis=1), opacity=0.6, line=dict(width=2.5, color=colors2[j]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hoverinfo=\"none\",", "default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) for", "0 and 100 inclusive. args: optional harmonics : list, optional", "**kwargs, ) ) for i, p in enumerate(percentile): fig.add_trace( go.Scatter(", "size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Damped", "frequency response for each pair input/output. Returns ------- subplots :", "- Amplitude: %{z:.2e}\" ), **kwargs, ) ) fig.add_trace( go.Scatter3d( x=x,", "\"Y - Amplitude: %{z:.2e}\" ), **kwargs, ) ) fig.update_layout( scene=dict(", "kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.phase, axis=1),", "conf_interval, units=units, **kwargs) fig1 = self.plot_phase(probe, percentile, conf_interval, **kwargs) fig2", "for i, p in enumerate(conf_interval): p1 = np.percentile(self.log_dec[j], 50 +", "np.concatenate((self.speed_range, self.speed_range[::-1])) for i, p in enumerate(conf_interval): p1 = np.percentile(self.magnitude,", "{i + 1} - confidence interval: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\",", "direction. percentile : list, optional Sequence of percentiles to compute,", "np.sort(conf_interval) percentile = np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k,", "+ 1} - percentile: {p}%\", legendgroup=f\"Probe {i + 1} -", "x = np.concatenate((self.speed_range, self.speed_range[::-1])) for j, h in enumerate(harmonics): fig.add_trace(", ") ) for i, p in enumerate(conf_interval): p1 = np.percentile(self.log_dec[j],", "speed</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, )", "which must be between 0 and 100 inclusive. fig :", "_probe_resp[0] * np.cos(angle) ** 2 + _probe_resp[1] * np.sin(angle) **", "col=1) subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1)", "orientation angle about the shaft. The 0 refers to +X", "natural frequency. Two options for plooting are available: Matplotlib and", "dof : int Degree of freedom. units : str Magnitude", "Response. Parameters ---------- time_range : 1-dimensional array Time array. yout", "Array with the frequencies. 
magnitude : array
    Magnitude of the
kwargs : optional
    Additional keyword
Two options for plotting are available: Matplotlib and Bokeh.
percentile : list, optional
*See Plotly", "**kwargs, ) ) x = np.concatenate((self.time_range, self.time_range[::-1])) for j, p", "------- fig : Plotly graph_objects.Figure() Bokeh plot axes with magnitude", "plot_polar_bode( self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot polar forced", "%{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), ) )", "col=1) subplots.update_layout( plot_bgcolor=\"white\", polar_bgcolor=\"white\", width=1800, height=900, polar=dict( radialaxis=fig2.layout.polar.radialaxis, angularaxis=fig2.layout.polar.angularaxis, ),", "name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"", "subplots.update_yaxes(fig1.layout.yaxis, row=1, col=2) subplots.update_layout( plot_bgcolor=\"white\", width=1800, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14),", "%{r:.2e}</b><br>\" + \"<b>Phase: %{theta:.2f}</b><br>\" + \"<b>Frequency: %{customdata:.2f}</b>\" ), **kwargs, )", "a given node on the rotor system in a 2D", "showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_yaxes( title_text=\"<b>Phase Angle</b>\", title_font=dict(family=\"Arial\", size=20),", "system Default is \"mic-pk-pk\" kwargs : optional Additional key word", "ndof * n + 1], 50 + p / 2,", "\"\"\" def __init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos): self.time_range", "Forced Response. Parameters ---------- force_resp : array Array with the", "np.percentile(self.yout[..., ndof * node], 50 - p / 2, axis=0)", "the speed range in rad/s. magnitude : array Array with", "\"Amplitude: %{y:.2e}\"), **kwargs, ) ) for i, p in enumerate(percentile):", "v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.phase, axis=1), opacity=1.0,", "p1 = np.percentile(probe_phase, 50 + p / 2, axis=0) p2", "dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"), **kwargs, ) )", "opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" +", "0 and 100 inclusive. 
fig : Plotly graph_objects.Figure()
    The figure object with the plot.
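    A hypothetical usage sketch (``results`` is assumed to be an instance
    exposing ``plot_magnitude`` with the signature above): pass the Figure
    returned by one call back in through ``fig`` to overlay a second probe
    on the same axes.

    >>> fig = results.plot_magnitude(probe=[(3, 0)], conf_interval=[90])  # doctest: +SKIP
    >>> fig = results.plot_magnitude(probe=[(3, np.pi / 2)], fig=fig)  # doctest: +SKIP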
Parameters
----------
percentile : list, optional
Parameters
----------
percentile : list, optional
conf_interval : list, optional
    Sequence of confidence intervals to
percentile : list, optional
    Sequence of percentiles to compute, which must be between 0 and 100
    inclusive.
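    A minimal standalone sketch (assumed example, not module code) of how a
    confidence interval ``p`` becomes a band in this module: the envelope
    between the (50 - p/2)th and (50 + p/2)th percentiles taken across the
    simulated runs.

    >>> import numpy as np
    >>> mag = np.random.default_rng(0).random((50, 200))  # 200 runs per speed
    >>> p = 90
    >>> upper = np.percentile(mag, 50 + p / 2, axis=1)
    >>> lower = np.percentile(mag, 50 - p / 2, axis=1)
    >>> upper.shape
    (50,)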
It is possible to visualize multiple harmonics in a single plot to check
other speeds which also excite a specific natural frequency.
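    A short standalone sketch (assumed example) of the harmonic overlay:
    each entry ``h`` in ``harmonics`` adds the straight line
    ``y = h * speed``, whose crossings with the natural-frequency curves
    mark potential critical speeds.

    >>> import numpy as np
    >>> from plotly import graph_objects as go
    >>> speed_range = np.linspace(0, 500, 100)
    >>> fig = go.Figure()
    >>> for h in [1, 2]:
    ...     _ = fig.add_trace(
    ...         go.Scatter(x=speed_range, y=speed_range * h,
    ...                    name="{}x speed".format(h),
    ...                    line=dict(dash="dashdot"))
    ...     )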
This method plots the", "+ 1], p, axis=0), opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), hovertemplate=(", "name=\"percentile: {}%\".format(p), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y -", "+ p / 2, axis=1) p2 = np.percentile(self.log_dec[j], 50 -", "= np.concatenate((self.time_range, self.time_range[::-1])) for j, p in enumerate(conf_interval): p1 =", "%{y:.2e}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\",", "a set of probes. Parameters ---------- probe : list of", "np.percentile(self.phase, 50 + p / 2, axis=1) p2 = np.percentile(self.phase,", "with amplitude vs frequency phase angle vs frequency. \"\"\" def", "Dec: %{y:.3f}\"), **kwargs, ) ) for i, p in enumerate(percentile):", "nodes_pos self.number_dof = number_dof def plot_1d( self, probe, percentile=[], conf_interval=[],", "x=x, y=np.percentile(self.yout[..., ndof * n], 50 + p / 2,", "specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}], [{}, None]] ) for data", "\"\"\" conf_interval = np.sort(conf_interval) percentile = np.sort(percentile) if units ==", "axis=1), opacity=1.0, name=\"Mean\", line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude:", "return fig def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs):", "color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\"", "legendgroup=f\"Probe {i + 1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\",", "\"Amplitude: %{y:.2e}\"), **kwargs, ) ) x = np.concatenate((self.speed_range, self.speed_range[::-1])) for", "ndof * n], 50 - p / 2, axis=0), z=np.percentile(", "j, phs in enumerate(self.phase): aux_phase = phs[:, p[0] * self.number_dof]", "showlegend=False, hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X - Amplitude: %{y:.2e}<br>\"", "hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) color_i += 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=\"<b>Phase", "color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), **kwargs, )", "subplots class ST_TimeResponseResults: \"\"\"Store stochastic results and provide plots for", ") ) color_p += 1 for j, p in enumerate(conf_interval):", "row=1, col=2) fig.update_xaxes(fig0.layout.xaxis, row=1, col=1) fig.update_yaxes(fig0.layout.yaxis, row=1, col=1) fig.update_xaxes(fig1.layout.xaxis, row=2,", "* self.number_dof dofy = p[0] * self.number_dof + 1 angle", "array Array with the frequencies, magnitude (dB) of the frequency", "This method plots Campbell Diagram. Parameters ---------- percentile : list,", "and 100 inclusive. 
args : optional
harmonics : list, optional
    List with the harmonics to be plotted. The default is to plot 1x.
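    A hypothetical call (``camp`` is assumed to be an ``ST_CampbellResults``
    instance returned by a stochastic Campbell analysis):

    >>> fig = camp.plot_nat_freq(conf_interval=[90], harmonics=[1, 2])  # doctest: +SKIP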
This method plots the frequency response magnitude given", "with diagrams for frequency and log dec. \"\"\" fig0 =", "theta=np.concatenate((p3, p4[::-1])), thetaunit=\"radians\", line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval:", "in enumerate(percentile): fig.add_trace( go.Scatter3d( x=x, y=np.percentile(self.yout[..., ndof * n], p,", "= np.sqrt((_probe_resp[0] * np.cos(angle)) ** 2 + (_probe_resp[1] * np.sin(angle))", "= \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": r_axis_label = \"<b>Amplitude", "{p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) x", "rad/s. magnitude : array Array with the frequencies, magnitude (dB)", "Python Figure Reference for more information. Returns ------- subplots :", "= ( _probe_resp[0] * np.cos(angle) ** 2 + _probe_resp[1] *", "for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.wd[j], p,", "50 + p / 2, axis=0), z=np.percentile( self.yout[..., ndof *", "if fig is None: fig = make_subplots( rows=2, cols=2, specs=[[{},", "p / 2, axis=0), z=np.percentile( self.yout[..., ndof * n +", "legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2, ), ) return subplots", "return fig def plot( self, probe, percentile=[], conf_interval=[], fig=None, units=\"mic-pk-pk\",", "fig def plot_2d(self, node, percentile=[], conf_interval=[], fig=None, *args, **kwargs): \"\"\"Plot", "orbit response (2D). This function plots orbits for a given", "showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, ) fig.update_layout( width=1200, height=900, plot_bgcolor=\"white\", legend=dict(", "analyses in st_rotor_assembly.py. \"\"\" import numpy as np from plotly", "enumerate(percentile): fig.add_trace( go.Scatterpolar( r=np.percentile(self.magnitude, p, axis=1), theta=np.percentile(self.phase, p, axis=1), customdata=self.speed_range,", "with the speed range in rad/s. wd : array Array", "The figure object with the plot. units : str Magnitude", ") color_i += 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=\"<b>Phase Angle</b>\") fig.update_layout(**kwargs), return fig", "legendgroup=f\"Probe {i + 1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\",", "j, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.frequency_range, y=np.percentile(probe_phase, p, axis=0),", "plot layout (e.g. width=800, height=600, ...). *See Plotly Python Figure", "y=line, z=line, line=dict(width=2.0, color=\"black\", dash=\"dashdot\"), showlegend=False, mode=\"lines\", ) ) for", "- Mode {}\".format(p, j + 1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency:", "+ \"Phase: %{y:.2f}\"), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Frequency</b>\", title_font=dict(family=\"Arial\", size=20),", "hovertemplate=( \"Nodal Position: %{x:.2f}<br>\" + \"X - Amplitude: %{y:.2e}<br>\" +", "\"<b>Amplitude (μ pk-pk)</b>\" else: y_axis_label = \"<b>Amplitude (dB)</b>\" conf_interval =", "0 color_i = 0 for i, p in enumerate(probe): probe_phase", "fig.add_trace( go.Scatter( x=self.time_range, y=np.percentile(probe_resp, p, axis=0), opacity=0.6, line=dict(width=2.5), name=f\"Probe {i", "yout, xout, number_dof, nodes_list, nodes_pos): self.time_range = time_range self.yout =", "Plotly Python Figure Reference for more information. 
Returns
-------
fig
number_dof : int
    Number of degrees of freedom per shaft element's node.
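    A sketch of the indexing this implies (an assumption inferred from the
    slicing ``self.yout[..., ndof * node]`` used elsewhere in this module):
    the x and y displacements of ``node`` sit at consecutive positions.

    >>> node, number_dof = 3, 4
    >>> dofx = node * number_dof      # x displacement of node 3
    >>> dofy = node * number_dof + 1  # y displacement of node 3
    >>> (dofx, dofy)
    (12, 13)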
This method plots the frequency response magnitude given an output and an input using Plotly.
\"\"\" ndof = self.number_dof default_values = dict(mode=\"lines\") conf_interval =", "legendgroup=\"mean{}\".format(j), hovertemplate=(\"Frequency: %{x:.3f}<br>\" + \"Log Dec: %{y:.3f}\"), **kwargs, ) )", "+ 1], p, axis=0), opacity=1.0, name=\"percentile: {}%\".format(p), line=dict(width=3, color=colors1[i]), legendgroup=\"perc{}\".format(p),", "__init__(self, time_range, yout, xout, number_dof, nodes_list, nodes_pos): self.time_range = time_range", "thetaunit=\"radians\", line=dict(width=1, color=colors1[i]), fill=\"toself\", fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i),", "\"\"\"Plot polar forced response using Plotly. Parameters ---------- dof :", "for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50 +", "line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" +", "axis=1), theta=np.percentile(self.phase, p, axis=1), customdata=self.speed_range, thetaunit=\"radians\", opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile:", "self.speed_range[::-1])) for j in range(self.log_dec.shape[0]): fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.log_dec[j], axis=1),", "stochastic results and provide plots for Frequency Response. Parameters ----------", "1} - percentile: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Phase: %{y:.2f}\", ) ) color_p", "axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range, thetaunit=\"radians\", mode=\"lines\", line=dict(width=3.0, color=list(tableau_colors)[i]), name=f\"Probe {i", "= np.percentile(self.log_dec[j], 50 - p / 2, axis=1) fig.add_trace( go.Scatter(", "fillcolor=colors1[i], opacity=0.5, name=\"confidence interval: {}%\".format(p), legendgroup=\"conf{}\".format(i), **kwargs, ) ) fig.update_layout(", "plot (e.g. line=dict(width=4.0, color=\"royalblue\"), opacity=1.0, ...) *See Plotly Python Figure", "given an output and an input using bokeh. Parameters ----------", "= np.percentile(self.phase, 50 - p / 2, axis=1) fig.add_trace( go.Scatterpolar(", "unit system. Default is \"mic-pk-pk\" polar_kwargs : optional Additional key", "/ 2, axis=1) p4 = np.percentile(self.phase, 50 - p /", "figure object with the plot. units : str, optional Unit", ": array Array with the damped natural frequencies log_dec :", "intervals to compute, which must be between 0 and 100", ") ) for i, p in enumerate(conf_interval): fig.add_trace( go.Scatter3d( x=x,", "%{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes(title_text=\"<b>Time (s)</b>\") fig.update_yaxes(title_text=\"<b>Amplitude</b>\")", "def plot_3d(self, percentile=[], conf_interval=[], fig=None, *args, **kwargs): \"\"\"Plot orbit response", "return fig class ST_ForcedResponseResults: \"\"\"Store stochastic results and provide plots", "np.percentile(probe_phase, 50 + p / 2, axis=0) p2 = np.percentile(probe_phase,", "node on the rotor system in a 2D view. Parameters", "axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j + 1), line=dict(width=3, color=colors1[j]),", "= \"<b>Amplitude (μ pk-pk)</b>\" else: y_axis_label = \"<b>Amplitude (dB)</b>\" conf_interval", "is \"mic-pk-pk\" kwargs : optional Additional key word arguments can", "with the frequencies, magnitude (dB) of the frequency response for", "p2 = np.percentile(self.log_dec[j], 50 - p / 2, axis=1) fig.add_trace(", "respective orbit response. percentile : list, optional Sequence of percentiles", "0% and 100% inclusive. 
fig : Plotly graph_objects.Figure()
    The figure object with the plot.
percentile : list, optional
    Sequence of percentiles to compute, which must be between
    0 and 100 inclusive.
\"\"\" def __init__(self, time_range, yout,", "* n], p, axis=0), z=np.percentile(self.yout[..., ndof * n + 1],", "self.number_dof = number_dof def plot_magnitude( self, probe, percentile=[], conf_interval=[], fig=None,", "legendgroup=f\"Probe {i + 1} - confidence interval: {p}%\", hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude:", "interval: {}%\".format(p), hovertemplate=( \"X - Amplitude: %{x:.2e}<br>\" + \"Y -", "subplots.update_yaxes(fig0.layout.yaxis, row=1, col=1) subplots.update_xaxes(fig1.layout.xaxis, row=2, col=1) subplots.update_yaxes(fig1.layout.yaxis, row=2, col=1) subplots.update_layout(", "between 0% and 100% inclusive. units : str, optional Unit", "name=f\"Probe {i + 1} - Mean\", line=dict(width=3.0), hovertemplate=(\"Time: %{x:.3f}<br>\" +", "\"m\": r_axis_label = \"<b>Amplitude (m)</b>\" elif units == \"mic-pk-pk\": r_axis_label", "np.sort(percentile) for k, v in default_values.items(): kwargs.setdefault(k, v) if fig", "subplots = make_subplots( rows=2, cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}],", "): \"\"\"Plot polar forced response using Plotly. Parameters ---------- dof", "cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}], [{}, None]] ) for", "response. percentile : list, optional Sequence of percentiles to compute,", "operator = np.array( [[np.cos(angle), - np.sin(angle)], [np.cos(angle), + np.sin(angle)]] )", "Array with the Logarithmic decrement Returns ------- subplots : Plotly", "go.Figure() default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval) percentile = np.sort(percentile)", "\"\"\"Plot frequency response. This method plots the unbalance response magnitude.", "fig.update_yaxes( title_text=y_axis_label, title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True,", "units, **kwargs) default_values = dict(showlegend=False) for k, v in default_values.items():", "+= 1 fig.update_xaxes(title_text=\"<b>Frequency</b>\") fig.update_yaxes(title_text=y_axis_label) fig.update_layout(**kwargs) return fig def plot_phase(self, probe,", "in a 3D view. Parameters ---------- percentile : list, optional", "for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=np.percentile(self.yout[..., ndof *", "legendgroup=f\"Probe {i + 1} - percentile{p}\", hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" +", "= int Number of degrees of freedom per shaft element's", "* h, opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3, color=colors1[j], dash=\"dashdot\"), legendgroup=\"speed{}\".format(j), hovertemplate=(\"Frequency:", "fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.wd[j], axis=1), opacity=1.0, name=\"Mean - Mode {}\".format(j", "This function plots orbits for each node on the rotor", "in default_values.items(): kwargs.setdefault(k, v) fig1 = self.plot_phase(percentile, conf_interval, **kwargs) fig2", "for each frequency. phase : array Phase of the frequency", "enumerate(harmonics): fig.add_trace( go.Scatter( x=self.speed_range, y=self.speed_range * h, opacity=1.0, name=\"{}x speed\".format(h),", "damped natural frequencies vs frequency. Parameters ---------- percentile : list,", "color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs,", "enumerate(conf_interval): p1 = np.percentile(probe_phase, 50 + p / 2, axis=0)", "graph_objects.Figure() The figure object with the plot. 
units : str, optional
    Unit system. Default is "mic-pk-pk".
This method plots the unbalance response magnitude.
line=dict(width=4.0, color="royalblue"), opacity=1.0, ...).
*See Plotly Python Figure Reference for more information.
\"\"\" fig0 = self.plot_nat_freq(percentile, conf_interval, *args,", ") fig.update_yaxes( title_text=\"<b>Damped Natural Frequencies</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True,", "fill=\"toself\", fillcolor=colors1[j], opacity=0.5, name=f\"Probe {i + 1} - confidence interval:", "Frequencies</b>\", title_font=dict(family=\"Arial\", size=20), tickfont=dict(size=16), gridcolor=\"lightgray\", showline=True, linewidth=2.5, linecolor=\"black\", mirror=True, )", "+ \"Frequency: %{y:.3f}\" ), **kwargs, ) ) fig.update_xaxes( title_text=\"<b>Rotor speed</b>\",", "z=np.percentile( self.yout[..., ndof * n + 1], 50 - p", "must be between 0% and 100% inclusive. fig : Plotly", "i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.wd[j], p, axis=1),", "probe_phase[i] - angle fig.add_trace( go.Scatterpolar( r=np.mean(probe_resp, axis=0), theta=np.mean(probe_phase, axis=0), customdata=self.frequency_range,", "fig is None: fig = go.Figure() color_i = 0 color_p", "), **kwargs, ) ) fig.update_layout( scene=dict( xaxis=dict(title=dict(text=\"<b>Rotor Length</b>\"), showspikes=False), yaxis=dict(title=dict(text=\"<b>Amplitude", "self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs, ): \"\"\"Plot polar forced response", "probe_phase[i] - angle fig.add_trace( go.Scatter( x=self.frequency_range, y=np.mean(probe_phase, axis=0), opacity=1.0, mode=\"lines\",", ") return fig def plot(self, percentile=[], conf_interval=[], units=\"mic-pk-pk\", **kwargs): \"\"\"Plot", "confidence intervals to compute, which must be between 0% and", "= \"<b>Amplitude (dB)</b>\" if fig is None: fig = go.Figure()", "+ 1), legendgroup=\"conf{}{}\".format(j, i), hovertemplate=( \"Frequency: %{x:.3f}<br>\" + \"Frequency: %{y:.3f}\"", "self.plot_magnitude(percentile, conf_interval, units, **kwargs) default_values = dict(showlegend=False) for k, v", "default_values = dict(showlegend=False) for k, v in default_values.items(): kwargs.setdefault(k, v)", "self.number_dof = number_dof def plot_1d( self, probe, percentile=[], conf_interval=[], fig=None,", "in enumerate(conf_interval): p1 = np.percentile(self.phase, 50 + p / 2,", "Array with the damped natural frequencies log_dec : array Array", "is None: fig = go.Figure() line = np.zeros(len(self.nodes_pos)) fig.add_trace( go.Scatter3d(", "for i, p in enumerate(conf_interval): p1 = np.percentile(self.magnitude, 50 +", "make_subplots from ross.plotly_theme import tableau_colors pio.renderers.default = \"browser\" # set", "%{y:.2e}\" ), **kwargs, ) ) for i, p in enumerate(conf_interval):", "fig.add_trace( go.Scatter( x=self.speed_range, y=self.speed_range * h, opacity=1.0, name=\"{}x speed\".format(h), line=dict(width=3,", "from a rotor model. nodes_pos: array Rotor nodes axial positions.", "compute, which must be between 0 and 100 inclusive. args:", "- Amplitude: %{y:.2e}<br>\" + \"Y - Amplitude: %{z:.2e}\" ), **kwargs,", "in default_values.items(): kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range,", "%{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) for j, p in enumerate(percentile): fig.add_trace(", "p / 2, axis=1) p2 = np.percentile(self.magnitude, 50 - p", "0]) for j, y in enumerate(self.yout): _probe_resp = operator @", "*args, **kwargs) default_values = dict(showlegend=False) for k, v in default_values.items():", "The default is to plot 1x. kwargs : optional Additional", "is located. 
orientation : float
    Probe orientation angle about the shaft. The 0 refers to +X direction.
This method plots the phase response given a set of probes.
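A hedged sketch of the phase handling this method relies on (the array
values and probe angle below are hypothetical, and np.where stands in for
the list-comprehension form used in the code): negative phases are wrapped
into [0, 2*pi) and the probe orientation angle is subtracted.

>>> import numpy as np
>>> aux_phase = np.array([-0.5, 0.3, -2.0])  # hypothetical raw phases (rad)
>>> wrapped = np.where(aux_phase < 0, aux_phase + 2 * np.pi, aux_phase)
>>> probe_phase = wrapped - np.pi / 4        # hypothetical probe angle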
nodes_list : array
    List with nodes from a rotor model.
\"\"\" fig0 = self.plot_magnitude(percentile, conf_interval, units, **kwargs) default_values", "{p}%\", hovertemplate=(\"Time: %{x:.3f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, ) ) fig.update_xaxes(title_text=\"<b>Time", "data in fig2[\"data\"]: fig.add_trace(data, row=1, col=2) fig.update_xaxes(fig0.layout.xaxis, row=1, col=1) fig.update_yaxes(fig0.layout.yaxis,", "subplots.update_layout( plot_bgcolor=\"white\", width=1800, height=900, legend=dict( font=dict(family=\"sans-serif\", size=14), bgcolor=\"white\", bordercolor=\"black\", borderwidth=2,", "rows=2, cols=2, specs=[[{}, {\"type\": \"polar\", \"rowspan\": 2}], [{}, None]] )", "fig.update_yaxes(title_text=y_axis_label) fig.update_layout(**kwargs) return fig def plot_phase(self, probe, percentile=[], conf_interval=[], fig=None,", "be between 0 and 100 inclusive. harmonics: list, optional List", "y_axis_label = \"<b>Amplitude (dB)</b>\" default_values = dict(mode=\"lines\") conf_interval = np.sort(conf_interval)", "r_axis_label = \"<b>Amplitude (dB)</b>\" if fig is None: fig =", "line=dict(width=3, color=\"black\"), legendgroup=\"mean\", hovertemplate=(\"Frequency: %{x:.2f}<br>\" + \"Amplitude: %{y:.2e}\"), **kwargs, )", "opacity=0.6, line=dict(width=2.5, color=colors2[i]), name=\"percentile: {}%\".format(p), legendgroup=\"percentile{}\".format(i), hovertemplate=( \"<b>Amplitude: %{r:.2e}</b><br>\" +", "50 - p / 2, axis=0), z=np.percentile( self.yout[..., ndof *", "* self.number_dof] probe_phase[i] = np.array( [i + 2 * np.pi", "pk-pk)</b>\" else: y_axis_label = \"<b>Amplitude (dB)</b>\" default_values = dict(mode=\"lines\") conf_interval", "= \"<b>Amplitude (dB)</b>\" for k, v in default_values.items(): kwargs.setdefault(k, v)", "np.vstack((y[:, dofx], y[:, dofy])) probe_resp[j] = ( _probe_resp[0] * np.cos(angle)", "Diagram. Parameters ---------- percentile : list, optional Sequence of percentiles", "arguments can be passed to change the plot layout only", "self.frequency_range[::-1])) for j, p in enumerate(conf_interval): p1 = np.percentile(probe_resp, 50", "for i, p in enumerate(percentile): fig.add_trace( go.Scatter( x=self.speed_range, y=np.percentile(self.magnitude, p,", "import express as px from plotly import graph_objects as go", "hovertemplate=\"Frequency: %{x:.2f}<br>Amplitude: %{y:.2e}\", ) ) color_p += 1 x =", "fig.add_trace( go.Scatter3d( x=x, y=np.mean(self.yout[..., ndof * n], axis=0), z=np.mean(self.yout[..., ndof", "ndof * n], axis=0), z=np.mean(self.yout[..., ndof * n + 1],", "col=1) for data in fig1[\"data\"]: data.showlegend = False fig.add_trace(data, row=2,", "tuples List with tuples (node, orientation angle). node : int", "the frequency response for node for each frequency. phase :", "...). *See Plotly Python Figure Reference for more information. Returns", "np.zeros_like(self.yout[:, :, 0]) for j, y in enumerate(self.yout): _probe_resp =", "rad/s. wd : array Array with the damped natural frequencies", "stochastic results and provide plots for Campbell Diagram. It's possible", "kwargs.setdefault(k, v) fig = go.Figure() fig.add_trace( go.Scatter( x=self.speed_range, y=np.mean(self.magnitude, axis=1),", "**kwargs): \"\"\"Plot orbit response (3D). This function plots orbits for", "i for i in aux_phase] ) angle = p[1] probe_phase[i]", "results and provide plots for Frequency Response. 
Parameters
----------
speed_range : array
    Array with the speed range in rad/s.
conf_interval : list, optional
    Sequence of confidence intervals to compute, which must be
    between 0% and 100% inclusive.
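A hedged sketch of how a confidence band is derived from the ensemble
(the magnitude array below is synthetic; the real code applies the same
percentile arithmetic along its ensemble axis):

>>> import numpy as np
>>> magnitude = np.random.rand(100, 50)  # synthetic (samples x frequencies)
>>> p = 95
>>> upper = np.percentile(magnitude, 50 + p / 2, axis=0)
>>> lower = np.percentile(magnitude, 50 - p / 2, axis=0)
>>> band = np.concatenate((upper, lower[::-1]))  # out-and-back for fill="toself"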
# Returns a 2-by-n numpy array. The first axis is which player (0 = us, 1 = opponent).
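# Hedged mini-demo (not part of the tournament loop): a visible history
# after three turns, matching the worked example in these comments, and
# the row flip that player 1 sees (mirroring np.flip in getVisibleHistory).
import numpy as np
history_demo = np.array([[0, 0, 1],   # our moves:      D D C
                         [1, 1, 1]])  # opponent moves: C C C
visible_to_player1 = np.flip(history_demo, 0)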
\"+pair[1]+\" (P2)\\n\") for p", "turn in range(LENGTH_OF_GAME): playerAmove, memoryA = moduleA.strategy(getVisibleHistory(history,0,turn),memoryA) playerBmove, memoryB =", "return historySoFar def runRound(pair): moduleA = importlib.import_module(STRATEGY_FOLDER+\".\"+pair[0]) moduleB = importlib.import_module(STRATEGY_FOLDER+\".\"+pair[1])", "= score/(len(STRATEGY_LIST)-1) f.write(\"#\"+str(rank+1)+\": \"+pad(STRATEGY_LIST[i]+\":\",16)+' %.3f'%score+' (%.3f'%scorePer+\" average)\\n\") f.flush() f.close() print(\"Done", "= us, 1 = opponent) # The second axis is", "= history.shape[1] for turn in range(ROUND_LENGTH): playerAmove = history[0,turn] playerBmove", "of being the final turn. history = np.zeros((2,LENGTH_OF_GAME),dtype=int) for turn", "history def tallyRoundScores(history): scoreA = 0 scoreB = 0 ROUND_LENGTH", "and we have defected twice then cooperated once, # and", "# The i-j-th element of this array is how many", "history[1,turn] scoreA += pointsArray[playerAmove][playerBmove] scoreB += pointsArray[playerBmove][playerAmove] return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH", "array. The first axis is which player (0 = us,", "\"exampleStrats\" RESULTS_FILE = \"results.txt\" pointsArray = [[1,5],[0,3]] # The i-j-th", "print(\"Starting tournament, reading files from \"+inFolder) scoreKeeper = {} STRATEGY_LIST", "pointsArray[playerBmove][playerAmove] return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH def outputRoundResults(f, pair, roundHistory, scoresA, scoresB):", "Returns a 2-by-n numpy array. The first axis is which", "defect, betray, sabotage, free-ride, etc. # C = cooperate, stay", "= next turn, etc. # For example, it might return", "def outputRoundResults(f, pair, roundHistory, scoresA, scoresB): f.write(pair[0]+\" (P1) VS. \"+pair[1]+\"", "silent, comply, upload files, etc. # Returns a 2-by-n numpy", "first turn, 1 = next turn, etc. # For example,", "def pad(stri, leng): result = stri for i in range(len(stri),leng):", "tallyRoundScores(history): scoreA = 0 scoreB = 0 ROUND_LENGTH = history.shape[1]", "os import itertools import importlib import numpy as np import", "np import random STRATEGY_FOLDER = \"exampleStrats\" RESULTS_FILE = \"results.txt\" pointsArray", "history.shape[1] for turn in range(ROUND_LENGTH): playerAmove = history[0,turn] playerBmove =", "leng): result = stri for i in range(len(stri),leng): result =", "\"+pad(STRATEGY_LIST[i]+\":\",16)+' %.3f'%score+' (%.3f'%scorePer+\" average)\\n\") f.flush() f.close() print(\"Done with everything! Results", "if player == 1: historySoFar = np.flip(historySoFar,0) return historySoFar def", "for turn in range(LENGTH_OF_GAME): playerAmove, memoryA = moduleA.strategy(getVisibleHistory(history,0,turn),memoryA) playerBmove, memoryB", "open(outFile,\"w+\") for pair in itertools.combinations(STRATEGY_LIST, r=2): roundHistory = runRound(pair) scoresA,", "reading files from \"+inFolder) scoreKeeper = {} STRATEGY_LIST = []", "a.k.a. D D C # [1 1 1]] a.k.a. C", "roundHistory, scoresA, scoresB) scoreKeeper[pair[0]] += scoresA scoreKeeper[pair[1]] += scoresB scoresNumpy", "= \"results.txt\" pointsArray = [[1,5],[0,3]] # The i-j-th element of", "scoresNumpy[i] = scoreKeeper[STRATEGY_LIST[i]] rankings = np.argsort(scoresNumpy) f.write(\"\\n\\nTOTAL SCORES\\n\") for rank", "= first turn, 1 = next turn, etc. # For", "D D C # [1 1 1]] a.k.a. C C", "is which turn. 
(0 = first turn, 1 = next", "for t in range(roundHistory.shape[1]): move = roundHistory[p,t] f.write(moveLabels[move]+\" \") f.write(\"\\n\")", "STRATEGY_LIST.append(file[:-3]) for strategy in STRATEGY_LIST: scoreKeeper[strategy] = 0 f =", "= [] for file in os.listdir(inFolder): if file.endswith(\".py\"): STRATEGY_LIST.append(file[:-3]) for", "for \"+pair[1]+\": \"+str(scoresB)+\"\\n\") f.write(\"\\n\") def pad(stri, leng): result = stri", "score for \"+pair[1]+\": \"+str(scoresB)+\"\\n\") f.write(\"\\n\") def pad(stri, leng): result =", "= playerBmove return history def tallyRoundScores(history): scoreA = 0 scoreB", "p in range(2): for t in range(roundHistory.shape[1]): move = roundHistory[p,t]", "# The games are a minimum of 50 turns long.", "scoresA, scoresB) scoreKeeper[pair[0]] += scoresA scoreKeeper[pair[1]] += scoresB scoresNumpy =", "axis is which player (0 = us, 1 = opponent)", "1 = opponent) # The second axis is which turn.", "pointsArray = [[1,5],[0,3]] # The i-j-th element of this array", "playerBmove, memoryB = moduleB.strategy(getVisibleHistory(history,1,turn),memoryB) history[0,turn] = playerAmove history[1,turn] = playerBmove", "twice then cooperated once, # and our opponent has cooperated", "range(LENGTH_OF_GAME): playerAmove, memoryA = moduleA.strategy(getVisibleHistory(history,0,turn),memoryA) playerBmove, memoryB = moduleB.strategy(getVisibleHistory(history,1,turn),memoryB) history[0,turn]", "in range(len(STRATEGY_LIST)): scoresNumpy[i] = scoreKeeper[STRATEGY_LIST[i]] rankings = np.argsort(scoresNumpy) f.write(\"\\n\\nTOTAL SCORES\\n\")", "it might return # # [[0 0 1] a.k.a. D", "i-j-th element of this array is how many points you", "# [1 1 1]] a.k.a. C C C # #", "(P2)\\n\") for p in range(2): for t in range(roundHistory.shape[1]): move", "then cooperated once, # and our opponent has cooperated all", "2-by-n numpy array. The first axis is which player (0", "is how many points you receive if you do play", "comply, upload files, etc. # Returns a 2-by-n numpy array.", "scoreA += pointsArray[playerAmove][playerBmove] scoreB += pointsArray[playerBmove][playerAmove] return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH def", "which turn. (0 = first turn, 1 = next turn,", "[] for file in os.listdir(inFolder): if file.endswith(\".py\"): STRATEGY_LIST.append(file[:-3]) for strategy", "itertools import importlib import numpy as np import random STRATEGY_FOLDER", "pointsArray[playerAmove][playerBmove] scoreB += pointsArray[playerBmove][playerAmove] return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH def outputRoundResults(f, pair,", "[1 1 1]] a.k.a. C C C # # if", "t in range(roundHistory.shape[1]): move = roundHistory[p,t] f.write(moveLabels[move]+\" \") f.write(\"\\n\") f.write(\"Final", "= playerAmove history[1,turn] = playerBmove return history def tallyRoundScores(history): scoreA", "turns long. 
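# A quick, illustrative sanity check of the payoff matrix (an editorial
# addition, not part of the original tournament code). With 0 = D and 1 = C:
assert pointsArray[1][1] == 3  # C vs C: mutual cooperation pays 3 each
assert pointsArray[0][0] == 1  # D vs D: mutual defection pays 1 each
assert pointsArray[0][1] == 5  # D vs C: the lone defector gets the temptation payoff
assert pointsArray[1][0] == 0  # C vs D: the exploited cooperator gets nothing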
# Returns a 2-by-n numpy array. The first axis is which player
# (0 = us, 1 = opponent). The second axis is which turn
# (0 = first turn, 1 = next turn, etc.).
# For example, it might return
#
# [[0 0 1]   a.k.a.   D D C
#  [1 1 1]]  a.k.a.   C C C
#
# if there have been 3 turns, we have defected twice then cooperated once,
# and our opponent has cooperated all three times.
def getVisibleHistory(history, player, turn):
    historySoFar = history[:,:turn].copy()
    if player == 1:
        historySoFar = np.flip(historySoFar,0)
    return historySoFar
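# Each competitor lives in its own module inside STRATEGY_FOLDER and exposes a
# strategy(history, memory) function returning a (move, memory) pair; that
# contract is implied by the moduleX.strategy(...) calls in runRound() below.
# A minimal illustrative strategy (hypothetical file name, not necessarily one
# of the bundled examples):
#
#   # exampleStrats/titForTat.py
#   def strategy(history, memory):
#       if history.shape[1] == 0:   # first turn: cooperate
#           return 1, None
#       return history[1,-1], None  # otherwise copy the opponent's last move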
def runRound(pair):
    moduleA = importlib.import_module(STRATEGY_FOLDER+"."+pair[0])
    moduleB = importlib.import_module(STRATEGY_FOLDER+"."+pair[1])
    memoryA = None
    memoryB = None

    LENGTH_OF_GAME = int(200-40*np.log(random.random())) # The games are a minimum of 200 turns long. The np.log here guarantees that every turn after the 200th has an equal (low) chance of being the final turn.
    history = np.zeros((2,LENGTH_OF_GAME),dtype=int)

    for turn in range(LENGTH_OF_GAME):
        playerAmove, memoryA = moduleA.strategy(getVisibleHistory(history,0,turn),memoryA)
        playerBmove, memoryB = moduleB.strategy(getVisibleHistory(history,1,turn),memoryB)
        history[0,turn] = playerAmove
        history[1,turn] = playerBmove

    return history
def tallyRoundScores(history):
    scoreA = 0
    scoreB = 0
    ROUND_LENGTH = history.shape[1]
    for turn in range(ROUND_LENGTH):
        playerAmove = history[0,turn]
        playerBmove = history[1,turn]
        scoreA += pointsArray[playerAmove][playerBmove]
        scoreB += pointsArray[playerBmove][playerAmove]
    return scoreA/ROUND_LENGTH, scoreB/ROUND_LENGTH

def outputRoundResults(f, pair, roundHistory, scoresA, scoresB):
    f.write(pair[0]+" (P1) VS. "+pair[1]+" (P2)\n")
    for p in range(2):
        for t in range(roundHistory.shape[1]):
            move = roundHistory[p,t]
            f.write(moveLabels[move]+" ")
        f.write("\n")
    f.write("Final score for "+pair[0]+": "+str(scoresA)+"\n")
    f.write("Final score for "+pair[1]+": "+str(scoresB)+"\n")
    f.write("\n")

def pad(stri, leng):
    result = stri
    for i in range(len(stri),leng):
        result = result+" "
    return result

def runFullPairingTournament(inFolder, outFile):
    print("Starting tournament, reading files from "+inFolder)
    scoreKeeper = {}
    STRATEGY_LIST = []
    for file in os.listdir(inFolder):
        if file.endswith(".py"):
            STRATEGY_LIST.append(file[:-3])

    for strategy in STRATEGY_LIST:
        scoreKeeper[strategy] = 0

    f = open(outFile,"w+")
    for pair in itertools.combinations(STRATEGY_LIST, r=2):
        roundHistory = runRound(pair)
        scoresA, scoresB = tallyRoundScores(roundHistory)
        outputRoundResults(f, pair, roundHistory, scoresA, scoresB)
        scoreKeeper[pair[0]] += scoresA
        scoreKeeper[pair[1]] += scoresB

    scoresNumpy = np.zeros(len(scoreKeeper))
    for i in range(len(STRATEGY_LIST)):
        scoresNumpy[i] = scoreKeeper[STRATEGY_LIST[i]]
    rankings = np.argsort(scoresNumpy)

    f.write("\n\nTOTAL SCORES\n")
    for rank in range(len(STRATEGY_LIST)):
        i = rankings[-1-rank]
        score = scoresNumpy[i]
        scorePer = score/(len(STRATEGY_LIST)-1)
        f.write("#"+str(rank+1)+": "+pad(STRATEGY_LIST[i]+":",16)+' %.3f'%score+' (%.3f'%scorePer+" average)\n")
    f.flush()
    f.close()
    print("Done with everything! Results file written to "+RESULTS_FILE)

runFullPairingTournament(STRATEGY_FOLDER, RESULTS_FILE)
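# For reference, the results file written above looks roughly like the sketch
# below (strategy names and numbers invented for illustration):
#
#   titForTat (P1) VS. alwaysDefect (P2)
#   C D D D ...
#   D D D D ...
#   Final score for titForTat: 0.995
#   Final score for alwaysDefect: 1.015
#
#   TOTAL SCORES
#   #1: titForTat:       2.511 (1.256 average)
#   #2: alwaysDefect:    2.013 (1.007 average)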
[ "with a new value. :param tblName: name of table in", "column values, insert the data into a specified table :param", "Execute that command via subprocess.call(). Using a cursor.execute() fails with", "code must retain the above copyright notice, this list of", "insert :type tblName: string :param colNameTuple: tuple containing column names", "one column with a new value. :param tblName: name of", "string :param port: MySQL host's port :type port: int :param", "#self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError:", "local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError: pwd = '...............' if len(passwd)", "ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,", "passwd: password to use for given user :type passwd: string", "(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF", ":param schema: dictionary mapping column names to column types :type", "EXISTS %s' % tableName) self.connection.commit() finally: cursor.close() def truncateTable(self, tableName):", "to valueTupleArray orders. :type colNameTuple: (str[,str[...]]) :param valueTupleArray: array of", "while True: nextRes = cursor.fetchone() if nextRes is None: cursor.close()", "are currently still open. ''' for cursor in self.cursors: try:", "= self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self, tableName):", "from this software without specific prior written permission. # #", "# # Redistribution and use in source and binary forms,", "PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,", "tmpCSVFile.write(','.join(valueTuple) + '\\n') try: # Remove quotes from the values", "self.connection.cursor() # For if caller never exhausts the results by", "database to connect to within server :type db: string '''", "tblName: String :param colnameValueDict: mapping of column name to column", "colNameTuple. :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]]) ''' tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple", "source code must retain the above copyright notice, this list", "mySQLColNameList) subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close() def", "close() method ''' import re import subprocess import tempfile import", "String ''' cursor = self.connection.cursor() try: cursor.execute('DROP TABLE IF EXISTS", "value :type colnameValueDict: Dict<String,Any> ''' colNames, colValues = zip(*colnameValueDict.items()) cursor", "NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE", "list of column values destined for a MySQL table :type", "the values to a temp file, then generate a LOAD", "colnameValueDict: mapping of column name to column value :type colnameValueDict:", "is to be changed :type colName: String :param newVal: value", "tableName) self.connection.commit() finally: cursor.close() def insert(self, tblName, colnameValueDict): ''' Given", "\"UPDATE %s SET %s = '%s' WHERE %s;\" % (tblName,colName,newVal,fromCondition)", "the following conditions are met: # 1. Redistributions of source", "= [] for el in colVals: if isinstance(el, basestring): resList.append('\"%s\"'", "Given a dictionary mapping column names to column values, insert", "without specific prior written permission. 
# # THIS SOFTWARE IS", "> 0 else '<no password>' raise ValueError('Cannot reach MySQL server", "schema): ''' Create new table, given its name, and schema.", "(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR", "result in mySqlObj.query('SELECT * FROM foo'): print result ''' def", "AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO", "of connection to close() method ''' import re import subprocess", "If all arguments are set to None, we are unittesting:", "of column values destined for a MySQL table :type colVals:", "destined for a MySQL table :type colVals: <any> ''' resList", "tableName: String ''' cursor = self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s'", "of rows into given table. Strategy: write the values to", "Poem') ---> '10, My Poem' :param colVals: list of column", "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT", "arguments are set to None, we are unittesting: if all(arg", "binary forms, with or without modification, are permitted provided that", "valueTupleArray): ''' Inserts large number of rows into given table.", "My Poem' :param colVals: list of column values destined for", "\"My Poem\"' Note that ','.join(map(str,myList)) won't work: (10, 'My Poem')", "cmd = \"UPDATE %s SET %s = '%s' WHERE %s;\"", "__init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'): ''' :param host: MySQL", "as :type user: string :param passwd: password to use for", "names to column types :type schema: Dict<String,String> ''' colSpec =", "colNameTuple, valueTupleArray): ''' Inserts large number of rows into given", "in my.cnf). :param tblName: table into which to insert :type", "MySQLdb.OperationalError: except pymysql.OperationalError: pwd = '...............' if len(passwd) > 0", "Dict<String,Any> ''' colNames, colValues = zip(*colnameValueDict.items()) cursor = self.connection.cursor() try:", "DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY", "(c) 2014, Stanford University # All rights reserved. # #", ":type tableName: String :param schema: dictionary mapping column names to", "THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH", "''' Close all cursors that are currently still open. '''", "FROM foo'): print result ''' def __init__(self, host='127.0.0.1', port=3306, user='root',", "Strategy: write the values to a temp file, then generate", "quotes from the values inside the colNameTuple's: mySQLColNameList = re.sub(\"'\",\"\",str(colNameTuple))", "rows. No errors :param tableName: name of table :type tableName:", "set to None, we are unittesting: if all(arg is None", "% (tblName,colName,newVal) else: cmd = \"UPDATE %s SET %s =", "optionally condition that selects which rows to update.\\ if None,", "given value. 
Syntax must conform to what may be in\\", "colVals: list of column values destined for a MySQL table", "may be in\\ a MySQL FROM clause (don't include the", "a cursor.execute() fails with error 'LOAD DATA LOCAL is not", "column names to column types :type schema: Dict<String,String> ''' colSpec", "result ''' def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'): '''", "values destined for a MySQL table :type colVals: <any> '''", "resList = [] for el in colVals: if isinstance(el, basestring):", "<PASSWORD> self.db = db self.cursors = [] try: self.connection =", "'\\\\n' %s\" %\\ (self.db, tmpCSVFile.name, tblName, mySQLColNameList) subprocess.call(['mysql', '-u', self.user,", "INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER", "#import MySQLdb class MySQLDB(object): ''' Shallow interface to MySQL databases.", "raise ValueError('Cannot reach MySQL server with host:%s, port:%s, user:%s, pwd:%s,", "'INT', 'col2' : 'TEXT'} :param tableName: name of new table", "insert into :type tblName: String :param colnameValueDict: mapping of column", "in as :type user: string :param passwd: password to use", "table, given its name, and schema. The schema is a", "subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close() def update(self,", "tblName: string :param colNameTuple: tuple containing column names in proper", "FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\\\"' LINES TERMINATED", "in schema.items(): colSpec += str(colName) + ' ' + str(colVal)", "'FROM' keyword) :type fromCondition: String ''' cursor = self.connection.cursor() try:", "given column :param fromCondition: optionally condition that selects which rows", "colSpec[:-1]) cursor = self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def", "of source code must retain the above copyright notice, this", "MySQL databases. Some niceties nonetheless. The query() method is an", "Poem') ---> '10, \"My Poem\"' Note that ','.join(map(str,myList)) won't work:", "= <PASSWORD> self.db = db self.cursors = [] try: self.connection", "set up to allow the op (load-infile=1 for both mysql", "cursor.execute('DROP TABLE IF EXISTS %s' % tableName) self.connection.commit() finally: cursor.close()", "%s (%s) ' % (tableName, colSpec[:-1]) cursor = self.connection.cursor() try:", "passwd: string :param db: database to connect to within server", "user: string :param passwd: password to use for given user", "insert(self, tblName, colnameValueDict): ''' Given a dictionary mapping column names", "def createTable(self, tableName, schema): ''' Create new table, given its", "whose value is to be changed :type colName: String :param", "passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError: pwd = '...............'", "work: (10, 'My Poem') ---> '10, My Poem' :param colVals:", "''' :param host: MySQL host :type host: string :param port:", "print result ''' def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'):", "disclaimer. # 2. Redistributions in binary form must reproduce the", "via subprocess.call(). Using a cursor.execute() fails with error 'LOAD DATA", "Order of\\ values must corresond to order of column names", "provided that the following conditions are met: # 1. Redistributions", "None: cmd = \"UPDATE %s SET %s = '%s';\" %", "resList.append(el) return ','.join(map(str,resList)) def query(self, queryStr): ''' Query iterator. 
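    # Connection sketch (illustrative; credentials and database name below are
    # placeholders, not values taken from this module):
    #
    #   db = MySQLDB(host='127.0.0.1', port=3306,
    #                user='myuser', passwd='mypass', db='mydb')
    #   try:
    #       ...  # issue queries, inserts, etc.
    #   finally:
    #       db.close()
    #
    # A failed connection surfaces as a ValueError with the password masked,
    # as implemented in __init__ above.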
    def close(self):
        '''
        Close all cursors that are currently still open.
        '''
        for cursor in self.cursors:
            try:
                cursor.close()
            except:
                pass
        try:
            self.connection.close()
        except:
            pass

    def createTable(self, tableName, schema):
        '''
        Create new table, given its name, and schema.
        The schema is a dict mapping column names to column types.
        Example: {'col1' : 'INT', 'col2' : 'TEXT'}
        :param tableName: name of new table
        :type tableName: String
        :param schema: dictionary mapping column names to column types
        :type schema: Dict<String,String>
        '''
        colSpec = ''
        for colName, colVal in schema.items():
            colSpec += str(colName) + ' ' + str(colVal) + ','
        cmd = 'CREATE TABLE IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1])
        cursor = self.connection.cursor()
        try:
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()
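    # Illustrative: with the docstring example schema {'col1': 'INT',
    # 'col2': 'TEXT'}, createTable('foo', schema) executes
    #
    #   CREATE TABLE IF NOT EXISTS foo (col1 INT,col2 TEXT)
    #
    # (column order follows the dict's iteration order).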
    def dropTable(self, tableName):
        '''
        Delete table safely. No errors.
        :param tableName: name of table
        :type tableName: String
        '''
        cursor = self.connection.cursor()
        try:
            cursor.execute('DROP TABLE IF EXISTS %s' % tableName)
            self.connection.commit()
        finally:
            cursor.close()

    def truncateTable(self, tableName):
        '''
        Delete all table rows. No errors.
        :param tableName: name of table
        :type tableName: String
        '''
        cursor = self.connection.cursor()
        try:
            cursor.execute('TRUNCATE TABLE %s' % tableName)
            self.connection.commit()
        finally:
            cursor.close()

    def insert(self, tblName, colnameValueDict):
        '''
        Given a dictionary mapping column names to column values, insert
        the data into a specified table
        :param tblName: name of table to insert into
        :type tblName: String
        :param colnameValueDict: mapping of column name to column value
        :type colnameValueDict: Dict<String,Any>
        '''
        colNames, colValues = zip(*colnameValueDict.items())
        cursor = self.connection.cursor()
        try:
            cmd = 'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues))
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()

    def bulkInsert(self, tblName, colNameTuple, valueTupleArray):
        '''
        Inserts large number of rows into given table. Strategy: write
        the values to a temp file, then generate a LOAD DATA LOCAL INFILE
        MySQL command. Execute that command via subprocess.call(). Using
        cursor.execute() fails with error 'LOAD DATA LOCAL is not supported
        in this MySQL version...' even though MySQL is set up to allow the
        op (load-infile=1 for both mysql and mysqld in my.cnf).
        :param tblName: table into which to insert
        :type tblName: string
        :param colNameTuple: tuple containing column names in proper order, i.e. \
                corresponding to valueTupleArray order.
        :type colNameTuple: (str[,str[...]])
        :param valueTupleArray: array of n-tuples, which hold the values. Order of\
                values must correspond to order of column names in colNameTuple.
        :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]])
        '''
        tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv')
        for valueTuple in valueTupleArray:
            tmpCSVFile.write(','.join(valueTuple) + '\n')
        # Make sure the CSV data is on disk before the mysql client reads it:
        tmpCSVFile.flush()
        try:
            # Remove quotes from the values inside the colNameTuple's:
            mySQLColNameList = re.sub("'","",str(colNameTuple))
            mySQLCmd = "USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY '\"' LINES TERMINATED BY '\\n' %s" %\
                       (self.db, tmpCSVFile.name, tblName, mySQLColNameList)
            subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd])
        finally:
            tmpCSVFile.close()
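    # Illustrative bulkInsert call (table, columns, and rows are placeholders):
    #
    #   db.bulkInsert('users',
    #                 ('name', 'country'),
    #                 [('Ann', 'US'), ('Bob', 'DE')])
    #
    # This writes the rows to a temp CSV file and shells out to the mysql
    # client with the LOAD DATA LOCAL INFILE command built above.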
    def update(self, tblName, colName, newVal, fromCondition=None):
        '''
        Update one column with a new value.
        :param tblName: name of table in which update is to occur
        :type tblName: String
        :param colName: column whose value is to be changed
        :type colName: String
        :param newVal: value acceptable to MySQL for the given column
        :type newVal: type acceptable to MySQL for the given column
        :param fromCondition: optional condition that selects which rows to update.\
                If None, the named column in all rows is updated to\
                the given value. Syntax must conform to what may be in\
                a MySQL FROM clause (don't include the 'FROM' keyword)
        :type fromCondition: String
        '''
        cursor = self.connection.cursor()
        try:
            if fromCondition is None:
                cmd = "UPDATE %s SET %s = '%s';" % (tblName,colName,newVal)
            else:
                cmd = "UPDATE %s SET %s = '%s' WHERE %s;" % (tblName,colName,newVal,fromCondition)
            cursor.execute(cmd)
            self.connection.commit()
        finally:
            cursor.close()

    def ensureSQLTyping(self, colVals):
        '''
        Given a list of items, return a string that preserves
        MySQL typing. Example: (10, 'My Poem') ---> '10, "My Poem"'
        Note that ','.join(map(str,myList)) won't work:
        (10, 'My Poem') ---> '10, My Poem'
        :param colVals: list of column values destined for a MySQL table
        :type colVals: <any>
        '''
        resList = []
        for el in colVals:
            if isinstance(el, basestring):
                resList.append('"%s"' % el)
            else:
                resList.append(el)
        return ','.join(map(str,resList))

    def query(self, queryStr):
        '''
        Query iterator. Given a query, return one result for each
        subsequent call.
        :param queryStr: query
        :type queryStr: String
        '''
        cursor = self.connection.cursor()
        # For if caller never exhausts the results by repeated calls:
        self.cursors.append(cursor)
        cursor.execute(queryStr)
        while True:
            nextRes = cursor.fetchone()
            if nextRes is None:
                cursor.close()
                return
            yield nextRes
# # THIS SOFTWARE IS PROVIDED BY", "be used to endorse or promote products derived from this", "user=user, passwd=<PASSWORD>, db=db) #self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db,", "'<no password>' raise ValueError('Cannot reach MySQL server with host:%s, port:%s,", "of\\ values must corresond to order of column names in", "table into which to insert :type tblName: string :param colNameTuple:", "names in colNameTuple. :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]]) ''' tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv')", "mapping column names to column types :type schema: Dict<String,String> '''", "column names in colNameTuple. :type valueTupleArray: [(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]]) ''' tmpCSVFile =", "''' Delete table safely. No errors :param tableName: name of", "preserves MySQL typing. Example: (10, 'My Poem') ---> '10, \"My", "into a specified table :param tblName: name of table to", "fails with error 'LOAD DATA LOCAL is not supported in", "# Copyright (c) 2014, Stanford University # All rights reserved.", "though MySQL is set up to allow the op (load-infile=1", "OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY", "WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR", "def query(self, queryStr): ''' Query iterator. Given a query, return", "documentation and/or other materials provided with the distribution. # 3.", "MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s' % (host,", "'-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close() def update(self, tblName,", "in\\ a MySQL FROM clause (don't include the 'FROM' keyword)", "column :param fromCondition: optionally condition that selects which rows to", "and use in source and binary forms, with or without", "names of its contributors may be used to endorse or", "this MySQL version...' even though MySQL is set up to", "(str[,str[...]]) :param valueTupleArray: array of n-tuples, which hold the values.", "of the copyright holder nor the names of its contributors", "allow the op (load-infile=1 for both mysql and mysqld in", "tableName: String ''' cursor = self.connection.cursor() try: cursor.execute('DROP TABLE IF", ":param colNameTuple: tuple containing column names in proper order, i.e.", "finally: cursor.close() def truncateTable(self, tableName): ''' Delete all table rows.", "Poem\"' Note that ','.join(map(str,myList)) won't work: (10, 'My Poem') --->", "IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE", "% (host, port, user, pwd, db)) def close(self): ''' Close", "cursor.close() except: pass try: self.connection.close() except: pass def createTable(self, tableName,", "tableName): ''' Delete all table rows. 
No errors :param tableName:", "server with host:%s, port:%s, user:%s, pwd:%s, db:%s' % (host, port,", "copyright notice, this list of conditions and the following disclaimer.", "cursor.close() def bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts large number", "pymysql.connect(host=host, port=port, user=user, passwd=<PASSWORD>, db=db) #self.connection = MySQLdb.connect(host=host, port=port, user=user,", "tableName: name of table :type tableName: String ''' cursor =", "tblName: String :param colName: column whose value is to be", "binary form must reproduce the above copyright notice, this list", ":param colName: column whose value is to be changed :type", "cursor.close() def dropTable(self, tableName): ''' Delete table safely. No errors", "order, i.e. \\ corresponding to valueTupleArray orders. :type colNameTuple: (str[,str[...]])", "op (load-infile=1 for both mysql and mysqld in my.cnf). :param", ":type user: string :param passwd: password to use for given", "self.connection.cursor() try: cursor.execute('DROP TABLE IF EXISTS %s' % tableName) self.connection.commit()", "self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s' % tableName) self.connection.commit() finally: cursor.close()", "try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self, tableName): ''' Delete", ":type tblName: String :param colName: column whose value is to", "large number of rows into given table. Strategy: write the", "for the given column :type newVal: type acceptable to MySQL", "# 3. Neither the name of the copyright holder nor", "MySQL is set up to allow the op (load-infile=1 for", "colNameTuple: tuple containing column names in proper order, i.e. \\", "OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF", "the 'FROM' keyword) :type fromCondition: String ''' cursor = self.connection.cursor()", "\"UPDATE %s SET %s = '%s';\" % (tblName,colName,newVal) else: cmd", "try: cursor.close() except: pass try: self.connection.close() except: pass def createTable(self,", "name, and schema. The schema is a dict mappingt column", "Dict<String,String> ''' colSpec = '' for colName, colVal in schema.items():", "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED", "FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES", "to insert into :type tblName: String :param colnameValueDict: mapping of", "tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple) + '\\n')", "AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT", "arg in (host,port,user,passwd,db)): return self.user = user self.pwd = <PASSWORD>", "class MySQLDB(object): ''' Shallow interface to MySQL databases. Some niceties", "THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,", "def dropTable(self, tableName): ''' Delete table safely. No errors :param", "Dec 30, 2013: Added closing of connection to close() method", "tmpCSVFile.name, tblName, mySQLColNameList) subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally:", "following disclaimer in the documentation and/or other materials provided with", "of table :type tableName: String ''' cursor = self.connection.cursor() try:", "TABLE IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1])", "def truncateTable(self, tableName): ''' Delete all table rows. No errors", "that are currently still open. 
''' for cursor in self.cursors:", "host: string :param port: MySQL host's port :type port: int", "%s SET %s = '%s';\" % (tblName,colName,newVal) else: cmd =", "fromCondition is None: cmd = \"UPDATE %s SET %s =", "for cursor in self.cursors: try: cursor.close() except: pass try: self.connection.close()", "= pymysql.connect(host=host, port=port, user=user, passwd=<PASSWORD>, db=db) #self.connection = MySQLdb.connect(host=host, port=port,", "column whose value is to be changed :type colName: String", "rows into given table. Strategy: write the values to a", "MySQL host :type host: string :param port: MySQL host's port", "the data into a specified table :param tblName: name of", "the copyright holder nor the names of its contributors may", "string :param passwd: password to use for given user :type", "= MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except", "colSpec = '' for colName, colVal in schema.items(): colSpec +=", "nextRes = cursor.fetchone() if nextRes is None: cursor.close() return yield", ":param colnameValueDict: mapping of column name to column value :type", "value acceptable to MySQL for the given column :type newVal:", "exhausts the results by repeated calls: self.cursors.append(cursor) cursor.execute(queryStr) while True:", "specific prior written permission. # # THIS SOFTWARE IS PROVIDED", "contributors may be used to endorse or promote products derived", "db=db) #self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1) #except", "insert the data into a specified table :param tblName: name", "\"USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE %s", "tblName, colName, newVal, fromCondition=None): ''' Update one column with a", "- Dec 30, 2013: Added closing of connection to close()", "cmd = 'CREATE TABLE IF NOT EXISTS %s (%s) '", "passwd='', db='mysql'): ''' :param host: MySQL host :type host: string", "CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE", "port=port, user=user, passwd=<PASSWORD>, db=db) #self.connection = MySQLdb.connect(host=host, port=port, user=user, passwd=passwd,", "2014, Stanford University # All rights reserved. # # Redistribution", "to endorse or promote products derived from this software without", "must conform to what may be in\\ a MySQL FROM", "a MySQL table :type colVals: <any> ''' resList = []", ":param host: MySQL host :type host: string :param port: MySQL", "named column in all rows are updated to\\ the given", "port=port, user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError: pwd", "user:%s, pwd:%s, db:%s' % (host, port, user, pwd, db)) def", "colVal in schema.items(): colSpec += str(colName) + ' ' +", "= '%s' WHERE %s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close()", "self.connection.commit() finally: cursor.close() def truncateTable(self, tableName): ''' Delete all table", "method ''' import re import subprocess import tempfile import pymysql", "are permitted provided that the following conditions are met: #", "column with a new value. :param tblName: name of table", "tableName: name of new table :type tableName: String :param schema:", "given table. 
Strategy: write the values to a temp file,", "(self.db, tmpCSVFile.name, tblName, mySQLColNameList) subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd])", "OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING", "the name of the copyright holder nor the names of", "''' Created on Sep 24, 2013 @author: paepcke Modifications: -", "mySqlObj.query('SELECT * FROM foo'): print result ''' def __init__(self, host='127.0.0.1',", "+ ' ' + str(colVal) + ',' cmd = 'CREATE", "'My Poem') ---> '10, \"My Poem\"' Note that ','.join(map(str,myList)) won't", "never exhausts the results by repeated calls: self.cursors.append(cursor) cursor.execute(queryStr) while", "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER", "TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A", "ARISING IN ANY WAY OUT OF THE USE OF THIS", "IF NOT EXISTS %s (%s) ' % (tableName, colSpec[:-1]) cursor", "cursor.execute() fails with error 'LOAD DATA LOCAL is not supported", "user='root', passwd='', db='mysql'): ''' :param host: MySQL host :type host:", "%s = '%s' WHERE %s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally:", "# All rights reserved. # # Redistribution and use in", "ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN", "= self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s' % tableName) self.connection.commit() finally:", "command via subprocess.call(). Using a cursor.execute() fails with error 'LOAD", "open. ''' for cursor in self.cursors: try: cursor.close() except: pass", "'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues))", "the values inside the colNameTuple's: mySQLColNameList = re.sub(\"'\",\"\",str(colNameTuple)) mySQLCmd =", "conditions are met: # 1. Redistributions of source code must", "try: # Remove quotes from the values inside the colNameTuple's:", "colName, newVal, fromCondition=None): ''' Update one column with a new", "USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND", "Redistributions in binary form must reproduce the above copyright notice,", "cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self, tableName): ''' Delete table", "an iterator. So:: for result in mySqlObj.query('SELECT * FROM foo'):", "try: cmd = 'INSERT INTO %s (%s) VALUES (%s)' %", "is not supported in this MySQL version...' even though MySQL", "ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR", "INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY ENCLOSED BY", "(don't include the 'FROM' keyword) :type fromCondition: String ''' cursor", "still open. ''' for cursor in self.cursors: try: cursor.close() except:", "PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY", "Added closing of connection to close() method ''' import re", "THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT", "No errors :param tableName: name of table :type tableName: String", "SET %s = '%s';\" % (tblName,colName,newVal) else: cmd = \"UPDATE", "# Redistribution and use in source and binary forms, with", "with or without modification, are permitted provided that the following", "clause (don't include the 'FROM' keyword) :type fromCondition: String '''", "'col2' : 'TEXT'} :param tableName: name of new table :type", "in this MySQL version...' 
even though MySQL is set up", "to None, we are unittesting: if all(arg is None for", "query :type queryStr: String ''' cursor = self.connection.cursor() # For", "else: cmd = \"UPDATE %s SET %s = '%s' WHERE", "Poem' :param colVals: list of column values destined for a", "file, then generate a LOAD INFILE LOCAL MySQL command. Execute", "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;", "port: int :param user: user to log in as :type", "log in as :type user: string :param passwd: password to", "MySQL FROM clause (don't include the 'FROM' keyword) :type fromCondition:", "self.pwd = <PASSWORD> self.db = db self.cursors = [] try:", "table. Strategy: write the values to a temp file, then", "LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ','", "'%s';\" % (tblName,colName,newVal) else: cmd = \"UPDATE %s SET %s", "the following disclaimer in the documentation and/or other materials provided", "import re import subprocess import tempfile import pymysql #import MySQLdb", "AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES,", "@author: paepcke Modifications: - Dec 30, 2013: Added closing of", "(tblName,colName,newVal) else: cmd = \"UPDATE %s SET %s = '%s'", "cursor.execute(queryStr) while True: nextRes = cursor.fetchone() if nextRes is None:", "list of conditions and the following disclaimer in the documentation", "self.connection.close() except: pass def createTable(self, tableName, schema): ''' Create new", "dict mappingt column names to column types. Example: {'col1' :", "user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError: pwd =", "self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals): ''' Given a list", "For if caller never exhausts the results by repeated calls:", "server :type db: string ''' # If all arguments are", "column value :type colnameValueDict: Dict<String,Any> ''' colNames, colValues = zip(*colnameValueDict.items())", "def bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts large number of", "table :type colVals: <any> ''' resList = [] for el", "def insert(self, tblName, colnameValueDict): ''' Given a dictionary mapping column", "%s = '%s';\" % (tblName,colName,newVal) else: cmd = \"UPDATE %s", ":param db: database to connect to within server :type db:", "on Sep 24, 2013 @author: paepcke Modifications: - Dec 30,", "''' for cursor in self.cursors: try: cursor.close() except: pass try:", "values, insert the data into a specified table :param tblName:", "2013: Added closing of connection to close() method ''' import", "TABLE %s' % tableName) self.connection.commit() finally: cursor.close() def insert(self, tblName,", "data into a specified table :param tblName: name of table", "OPTIONALLY ENCLOSED BY '\\\"' LINES TERMINATED BY '\\\\n' %s\" %\\", "DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS", "(host, port, user, pwd, db)) def close(self): ''' Close all", "both mysql and mysqld in my.cnf). :param tblName: table into", ":type tblName: String :param colnameValueDict: mapping of column name to", "all cursors that are currently still open. ''' for cursor", "cursor = self.connection.cursor() # For if caller never exhausts the", "column names to column types. Example: {'col1' : 'INT', 'col2'", "tblName, colnameValueDict): ''' Given a dictionary mapping column names to", "A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE", "bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts large number of rows", "command. Execute that command via subprocess.call(). Using a cursor.execute() fails", "query() method is an iterator. So:: for result in mySqlObj.query('SELECT", "% el) else: resList.append(el) return ','.join(map(str,resList)) def query(self, queryStr): '''", "tableName, schema): ''' Create new table, given its name, and", "self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close() def update(self, tblName, colName,", "include the 'FROM' keyword) :type fromCondition: String ''' cursor =", "colVals): ''' Given a list of items, return a string", "new table, given its name, and schema. The schema is", "''' cursor = self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s' % tableName)", "may be used to endorse or promote products derived from", "paepcke Modifications: - Dec 30, 2013: Added closing of connection", "write the values to a temp file, then generate a", "in all rows are updated to\\ the given value. Syntax", "update(self, tblName, colName, newVal, fromCondition=None): ''' Update one column with", "except: pass def createTable(self, tableName, schema): ''' Create new table,", "try: if fromCondition is None: cmd = \"UPDATE %s SET", "THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY EXPRESS", "of table to insert into :type tblName: String :param colnameValueDict:", "''' import re import subprocess import tempfile import pymysql #import", "types :type schema: Dict<String,String> ''' colSpec = '' for colName,", "mapping of column name to column value :type colnameValueDict: Dict<String,Any>", "''' Given a list of items, return a string that", "'My Poem') ---> '10, My Poem' :param colVals: list of", "that selects which rows to update.\\ if None, the named", "iterator. So:: for result in mySqlObj.query('SELECT * FROM foo'): print", "True: nextRes = cursor.fetchone() if nextRes is None: cursor.close() return", "NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE", "port :type port: int :param user: user to log in", "column values destined for a MySQL table :type colVals: <any>", "LINES TERMINATED BY '\\\\n' %s\" %\\ (self.db, tmpCSVFile.name, tblName, mySQLColNameList)", "OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE", "OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED", "(host,port,user,passwd,db)): return self.user = user self.pwd = <PASSWORD> self.db =", "= self.connection.cursor() # For if caller never exhausts the results", "to column value :type colnameValueDict: Dict<String,Any> ''' colNames, colValues =", "must reproduce the above copyright notice, this list of conditions", "BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\" AND ANY", ":type port: int :param user: user to log in as", "(str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues)) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def bulkInsert(self, tblName,", "''' # If all arguments are set to None, we", ":type newVal: type acceptable to MySQL for the given column", "% (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals): '''", "OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,", ":param queryStr: query :type queryStr: String ''' cursor = self.connection.cursor()", "password to use for given user :type passwd: string :param", "''' Delete all table rows. 
No errors :param tableName: name", "MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN", "OF SUCH DAMAGE. ''' Created on Sep 24, 2013 @author:", "= user self.pwd = <PASSWORD> self.db = db self.cursors =", "names to column values, insert the data into a specified", "hold the values. Order of\\ values must corresond to order", "to MySQL for the given column :type newVal: type acceptable", "SET %s = '%s' WHERE %s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit()", "conditions and the following disclaimer. # 2. Redistributions in binary", "for a MySQL table :type colVals: <any> ''' resList =", "valueTupleArray orders. :type colNameTuple: (str[,str[...]]) :param valueTupleArray: array of n-tuples,", "host:%s, port:%s, user:%s, pwd:%s, db:%s' % (host, port, user, pwd,", "''' resList = [] for el in colVals: if isinstance(el,", "and the following disclaimer. # 2. Redistributions in binary form", "except pymysql.OperationalError: pwd = '...............' if len(passwd) > 0 else", "ValueError('Cannot reach MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s'", "HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,", "within server :type db: string ''' # If all arguments", "# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND", "condition that selects which rows to update.\\ if None, the", "the colNameTuple's: mySQLColNameList = re.sub(\"'\",\"\",str(colNameTuple)) mySQLCmd = \"USE %s; LOAD", "a list of items, return a string that preserves MySQL", "BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR", "to column types. Example: {'col1' : 'INT', 'col2' : 'TEXT'}", "the following disclaimer. # 2. Redistributions in binary form must", "use for given user :type passwd: string :param db: database", "import tempfile import pymysql #import MySQLdb class MySQLDB(object): ''' Shallow", "following disclaimer. # 2. Redistributions in binary form must reproduce", "','.join(map(str,myList)) won't work: (10, 'My Poem') ---> '10, My Poem'", "column types :type schema: Dict<String,String> ''' colSpec = '' for", "newVal, fromCondition=None): ''' Update one column with a new value.", "dictionary mapping column names to column values, insert the data", "selects which rows to update.\\ if None, the named column", "call. :param queryStr: query :type queryStr: String ''' cursor =", "of conditions and the following disclaimer in the documentation and/or", "host: MySQL host :type host: string :param port: MySQL host's", "SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,", "IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES", "typing. Example: (10, 'My Poem') ---> '10, \"My Poem\"' Note", "Copyright (c) 2014, Stanford University # All rights reserved. #", "mysql and mysqld in my.cnf). :param tblName: table into which", "[(<anyMySQLCompatibleTypes>[<anyMySQLCompatibleTypes,...]]) ''' tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple)", "INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT", "TERMINATED BY '\\\\n' %s\" %\\ (self.db, tmpCSVFile.name, tblName, mySQLColNameList) subprocess.call(['mysql',", "if None, the named column in all rows are updated", "notice, this list of conditions and the following disclaimer in", "# If all arguments are set to None, we are", "Inserts large number of rows into given table. 
Strategy: write", "else: resList.append(el) return ','.join(map(str,resList)) def query(self, queryStr): ''' Query iterator.", "''' cursor = self.connection.cursor() # For if caller never exhausts", ": 'INT', 'col2' : 'TEXT'} :param tableName: name of new", "IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,", "Example: (10, 'My Poem') ---> '10, \"My Poem\"' Note that", "' + str(colVal) + ',' cmd = 'CREATE TABLE IF", "' % (tableName, colSpec[:-1]) cursor = self.connection.cursor() try: cursor.execute(cmd) self.connection.commit()", "All rights reserved. # # Redistribution and use in source", "host :type host: string :param port: MySQL host's port :type", "in mySqlObj.query('SELECT * FROM foo'): print result ''' def __init__(self,", "and the following disclaimer in the documentation and/or other materials", "and/or other materials provided with the distribution. # 3. Neither", "''' colSpec = '' for colName, colVal in schema.items(): colSpec", "def update(self, tblName, colName, newVal, fromCondition=None): ''' Update one column", "table :param tblName: name of table to insert into :type", ":param tblName: table into which to insert :type tblName: string", "which to insert :type tblName: string :param colNameTuple: tuple containing", "(%s) ' % (tableName, colSpec[:-1]) cursor = self.connection.cursor() try: cursor.execute(cmd)", "types. Example: {'col1' : 'INT', 'col2' : 'TEXT'} :param tableName:", "given column :type newVal: type acceptable to MySQL for the", "the documentation and/or other materials provided with the distribution. #", "DATA LOCAL is not supported in this MySQL version...' even", ":type schema: Dict<String,String> ''' colSpec = '' for colName, colVal", "value. Syntax must conform to what may be in\\ a", "results by repeated calls: self.cursors.append(cursor) cursor.execute(queryStr) while True: nextRes =", "of n-tuples, which hold the values. Order of\\ values must", "= \"USE %s; LOAD DATA LOCAL INFILE '%s' INTO TABLE", "user: user to log in as :type user: string :param", "this list of conditions and the following disclaimer in the", "'TEXT'} :param tableName: name of new table :type tableName: String", "(10, 'My Poem') ---> '10, My Poem' :param colVals: list", "finally: cursor.close() def bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts large", "modification, are permitted provided that the following conditions are met:", "schema is a dict mappingt column names to column types.", "+ '\\n') try: # Remove quotes from the values inside", "are unittesting: if all(arg is None for arg in (host,port,user,passwd,db)):", "in the documentation and/or other materials provided with the distribution.", "names in proper order, i.e. \\ corresponding to valueTupleArray orders.", "subsequent call. :param queryStr: query :type queryStr: String ''' cursor", "+ str(colVal) + ',' cmd = 'CREATE TABLE IF NOT", "colVals: <any> ''' resList = [] for el in colVals:", "INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY", "that command via subprocess.call(). Using a cursor.execute() fails with error", "for the given column :param fromCondition: optionally condition that selects", "newVal: type acceptable to MySQL for the given column :param", "OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED", "self.connection.commit() finally: cursor.close() def bulkInsert(self, tblName, colNameTuple, valueTupleArray): ''' Inserts", "use in source and binary forms, with or without modification,", "to\\ the given value. 
Syntax must conform to what may", "table :type tableName: String :param schema: dictionary mapping column names", "= cursor.fetchone() if nextRes is None: cursor.close() return yield nextRes", "tblName, mySQLColNameList) subprocess.call(['mysql', '-u', self.user, '-p%s'%self.pwd, '-e', mySQLCmd]) finally: tmpCSVFile.close()", "corresponding to valueTupleArray orders. :type colNameTuple: (str[,str[...]]) :param valueTupleArray: array", ":type tblName: string :param colNameTuple: tuple containing column names in", "ENCLOSED BY '\\\"' LINES TERMINATED BY '\\\\n' %s\" %\\ (self.db,", "WHERE %s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self,", "LOAD DATA LOCAL INFILE '%s' INTO TABLE %s FIELDS TERMINATED", "= \"UPDATE %s SET %s = '%s';\" % (tblName,colName,newVal) else:", "then generate a LOAD INFILE LOCAL MySQL command. Execute that", "= '%s';\" % (tblName,colName,newVal) else: cmd = \"UPDATE %s SET", "reserved. # # Redistribution and use in source and binary", "other materials provided with the distribution. # 3. Neither the", ":param tableName: name of table :type tableName: String ''' cursor", "'...............' if len(passwd) > 0 else '<no password>' raise ValueError('Cannot", "to insert :type tblName: string :param colNameTuple: tuple containing column", "{'col1' : 'INT', 'col2' : 'TEXT'} :param tableName: name of", "MySQLDB(object): ''' Shallow interface to MySQL databases. Some niceties nonetheless.", ":param tblName: name of table in which update is to", "DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON", "each subsequent call. :param queryStr: query :type queryStr: String '''", "method is an iterator. So:: for result in mySqlObj.query('SELECT *", "queryStr): ''' Query iterator. Given a query, return one result", ":type queryStr: String ''' cursor = self.connection.cursor() # For if", "for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple) + '\\n') try: # Remove", "are set to None, we are unittesting: if all(arg is", "of its contributors may be used to endorse or promote", "else '<no password>' raise ValueError('Cannot reach MySQL server with host:%s,", "subprocess import tempfile import pymysql #import MySQLdb class MySQLDB(object): '''", "one result for each subsequent call. :param queryStr: query :type", "ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Created on", "% tableName) self.connection.commit() finally: cursor.close() def insert(self, tblName, colnameValueDict): '''", "which hold the values. Order of\\ values must corresond to", "PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER", "forms, with or without modification, are permitted provided that the", "(tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals): ''' Given", "and schema. The schema is a dict mappingt column names", "caller never exhausts the results by repeated calls: self.cursors.append(cursor) cursor.execute(queryStr)", "# # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS", "acceptable to MySQL for the given column :type newVal: type", "values to a temp file, then generate a LOAD INFILE", "%s SET %s = '%s' WHERE %s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd)", "TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,", "generate a LOAD INFILE LOCAL MySQL command. Execute that command", "updated to\\ the given value. 
Syntax must conform to what", "basestring): resList.append('\"%s\"' % el) else: resList.append(el) return ','.join(map(str,resList)) def query(self,", "result for each subsequent call. :param queryStr: query :type queryStr:", "cursor.close() def ensureSQLTyping(self, colVals): ''' Given a list of items,", "ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR", "String :param newVal: value acceptable to MySQL for the given", "self.connection.cursor() try: cmd = 'INSERT INTO %s (%s) VALUES (%s)'", "which update is to occur :type tblName: String :param colName:", "conditions and the following disclaimer in the documentation and/or other", "Syntax must conform to what may be in\\ a MySQL", "update.\\ if None, the named column in all rows are", "or promote products derived from this software without specific prior", "IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS IS\"", "new value. :param tblName: name of table in which update", "list of conditions and the following disclaimer. # 2. Redistributions", "query(self, queryStr): ''' Query iterator. Given a query, return one", "pymysql #import MySQLdb class MySQLDB(object): ''' Shallow interface to MySQL", "try: self.connection.close() except: pass def createTable(self, tableName, schema): ''' Create", "MySQL for the given column :param fromCondition: optionally condition that", "len(passwd) > 0 else '<no password>' raise ValueError('Cannot reach MySQL", "schema: dictionary mapping column names to column types :type schema:", "FROM clause (don't include the 'FROM' keyword) :type fromCondition: String", "MySQL command. Execute that command via subprocess.call(). Using a cursor.execute()", "met: # 1. Redistributions of source code must retain the", "values must corresond to order of column names in colNameTuple.", "cursor.execute('TRUNCATE TABLE %s' % tableName) self.connection.commit() finally: cursor.close() def insert(self,", "''' Inserts large number of rows into given table. Strategy:", "materials provided with the distribution. # 3. Neither the name", "mapping column names to column values, insert the data into", "MySQL table :type colVals: <any> ''' resList = [] for", "cursor = self.connection.cursor() try: cursor.execute('DROP TABLE IF EXISTS %s' %", ":type tableName: String ''' cursor = self.connection.cursor() try: cursor.execute('DROP TABLE", "if caller never exhausts the results by repeated calls: self.cursors.append(cursor)", "copyright notice, this list of conditions and the following disclaimer", "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR", "permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT", "pass def createTable(self, tableName, schema): ''' Create new table, given", "%s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues)) cursor.execute(cmd) self.connection.commit()", "the names of its contributors may be used to endorse", "self.user = user self.pwd = <PASSWORD> self.db = db self.cursors", "try: self.connection = pymysql.connect(host=host, port=port, user=user, passwd=<PASSWORD>, db=db) #self.connection =", "iterator. Given a query, return one result for each subsequent", "db self.cursors = [] try: self.connection = pymysql.connect(host=host, port=port, user=user,", "a dict mappingt column names to column types. Example: {'col1'", "the values. Order of\\ values must corresond to order of", "port=3306, user='root', passwd='', db='mysql'): ''' :param host: MySQL host :type", "pymysql.OperationalError: pwd = '...............' 
if len(passwd) > 0 else '<no", "safely. No errors :param tableName: name of table :type tableName:", "cursor.close() def truncateTable(self, tableName): ''' Delete all table rows. No", "proper order, i.e. \\ corresponding to valueTupleArray orders. :type colNameTuple:", "',' OPTIONALLY ENCLOSED BY '\\\"' LINES TERMINATED BY '\\\\n' %s\"", "if fromCondition is None: cmd = \"UPDATE %s SET %s", "temp file, then generate a LOAD INFILE LOCAL MySQL command.", "''' Shallow interface to MySQL databases. Some niceties nonetheless. The", "won't work: (10, 'My Poem') ---> '10, My Poem' :param", "provided with the distribution. # 3. Neither the name of", "def close(self): ''' Close all cursors that are currently still", "',' cmd = 'CREATE TABLE IF NOT EXISTS %s (%s)", "SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;", "University # All rights reserved. # # Redistribution and use", "to connect to within server :type db: string ''' #", "Some niceties nonetheless. The query() method is an iterator. So::", "Given a list of items, return a string that preserves", "#except MySQLdb.OperationalError: except pymysql.OperationalError: pwd = '...............' if len(passwd) >", "self.connection.commit() finally: cursor.close() def insert(self, tblName, colnameValueDict): ''' Given a", "INFILE '%s' INTO TABLE %s FIELDS TERMINATED BY ',' OPTIONALLY", "OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.", "error 'LOAD DATA LOCAL is not supported in this MySQL", "changed :type colName: String :param newVal: value acceptable to MySQL", "databases. Some niceties nonetheless. The query() method is an iterator.", "the given value. Syntax must conform to what may be", "column names to column values, insert the data into a", "Using a cursor.execute() fails with error 'LOAD DATA LOCAL is", "column types. Example: {'col1' : 'INT', 'col2' : 'TEXT'} :param", "= 'INSERT INTO %s (%s) VALUES (%s)' % (str(tblName), ','.join(colNames),", "self.connection.cursor() try: if fromCondition is None: cmd = \"UPDATE %s", "% (str(tblName), ','.join(colNames), self.ensureSQLTyping(colValues)) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def bulkInsert(self,", "1. Redistributions of source code must retain the above copyright", "24, 2013 @author: paepcke Modifications: - Dec 30, 2013: Added", "MySQLdb.connect(host=host, port=port, user=user, passwd=passwd, db=db, local_infile=1) #except MySQLdb.OperationalError: except pymysql.OperationalError:", "SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \"AS", "name of table in which update is to occur :type", "cursor = self.connection.cursor() try: cmd = 'INSERT INTO %s (%s)", "column names in proper order, i.e. \\ corresponding to valueTupleArray", "for el in colVals: if isinstance(el, basestring): resList.append('\"%s\"' % el)", "# For if caller never exhausts the results by repeated", "Create new table, given its name, and schema. 
The schema", "reach MySQL server with host:%s, port:%s, user:%s, pwd:%s, db:%s' %", "= self.connection.cursor() try: cmd = 'INSERT INTO %s (%s) VALUES", "to use for given user :type passwd: string :param db:", "= 'CREATE TABLE IF NOT EXISTS %s (%s) ' %", "''' colNames, colValues = zip(*colnameValueDict.items()) cursor = self.connection.cursor() try: cmd", "= '' for colName, colVal in schema.items(): colSpec += str(colName)", "pwd:%s, db:%s' % (host, port, user, pwd, db)) def close(self):", "db:%s' % (host, port, user, pwd, db)) def close(self): '''", "to column types :type schema: Dict<String,String> ''' colSpec = ''", "table safely. No errors :param tableName: name of table :type", "%s;\" % (tblName,colName,newVal,fromCondition) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals):", "colNames, colValues = zip(*colnameValueDict.items()) cursor = self.connection.cursor() try: cmd =", "with the distribution. # 3. Neither the name of the", "[] for el in colVals: if isinstance(el, basestring): resList.append('\"%s\"' %", "queryStr: String ''' cursor = self.connection.cursor() # For if caller", "'-e', mySQLCmd]) finally: tmpCSVFile.close() def update(self, tblName, colName, newVal, fromCondition=None):", "''' def __init__(self, host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'): ''' :param", "(tableName, colSpec[:-1]) cursor = self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close()", "+ ',' cmd = 'CREATE TABLE IF NOT EXISTS %s", "* FROM foo'): print result ''' def __init__(self, host='127.0.0.1', port=3306,", "The query() method is an iterator. So:: for result in", "(load-infile=1 for both mysql and mysqld in my.cnf). :param tblName:", "name of table to insert into :type tblName: String :param", "cursor.execute(cmd) self.connection.commit() finally: cursor.close() def bulkInsert(self, tblName, colNameTuple, valueTupleArray): '''", "in self.cursors: try: cursor.close() except: pass try: self.connection.close() except: pass", "import subprocess import tempfile import pymysql #import MySQLdb class MySQLDB(object):", "'LOAD DATA LOCAL is not supported in this MySQL version...'", "in colVals: if isinstance(el, basestring): resList.append('\"%s\"' % el) else: resList.append(el)", "DAMAGE. ''' Created on Sep 24, 2013 @author: paepcke Modifications:", "schema.items(): colSpec += str(colName) + ' ' + str(colVal) +", "ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,", "import pymysql #import MySQLdb class MySQLDB(object): ''' Shallow interface to", "into which to insert :type tblName: string :param colNameTuple: tuple", "+= str(colName) + ' ' + str(colVal) + ',' cmd", "self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self, tableName): '''", "Close all cursors that are currently still open. ''' for", "TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT", "closing of connection to close() method ''' import re import", "finally: cursor.close() def insert(self, tblName, colnameValueDict): ''' Given a dictionary", "to a temp file, then generate a LOAD INFILE LOCAL", "for colName, colVal in schema.items(): colSpec += str(colName) + '", "USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY", "niceties nonetheless. The query() method is an iterator. So:: for", "LOCAL is not supported in this MySQL version...' 
even though", "string :param colNameTuple: tuple containing column names in proper order,", "table in which update is to occur :type tblName: String", "2. Redistributions in binary form must reproduce the above copyright", "Update one column with a new value. :param tblName: name", "cursor.execute(cmd) self.connection.commit() finally: cursor.close() def ensureSQLTyping(self, colVals): ''' Given a", "to MySQL for the given column :param fromCondition: optionally condition", "cursor = self.connection.cursor() try: cursor.execute(cmd) self.connection.commit() finally: cursor.close() def dropTable(self,", "ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,", "try: cursor.execute('TRUNCATE TABLE %s' % tableName) self.connection.commit() finally: cursor.close() def", "port:%s, user:%s, pwd:%s, db:%s' % (host, port, user, pwd, db))", "ensureSQLTyping(self, colVals): ''' Given a list of items, return a", "= tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple) + '\\n') try:", "version...' even though MySQL is set up to allow the", "DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT", "for result in mySqlObj.query('SELECT * FROM foo'): print result '''", "String ''' cursor = self.connection.cursor() try: if fromCondition is None:", "its name, and schema. The schema is a dict mappingt", "to be changed :type colName: String :param newVal: value acceptable", "''' tmpCSVFile = tempfile.NamedTemporaryFile(dir='/tmp',prefix='userCountryTmp',suffix='.csv') for valueTuple in valueTupleArray: tmpCSVFile.write(','.join(valueTuple) +", "String ''' cursor = self.connection.cursor() try: cursor.execute('TRUNCATE TABLE %s' %", "colName: String :param newVal: value acceptable to MySQL for the", "MySQLdb class MySQLDB(object): ''' Shallow interface to MySQL databases. Some", "<any> ''' resList = [] for el in colVals: if", "PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT", "IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ''' Created", "port, user, pwd, db)) def close(self): ''' Close all cursors", "for given user :type passwd: string :param db: database to", "','.join(colNames), self.ensureSQLTyping(colValues)) cursor.execute(cmd) self.connection.commit() finally: cursor.close() def bulkInsert(self, tblName, colNameTuple,", "to close() method ''' import re import subprocess import tempfile", "colNameTuple's: mySQLColNameList = re.sub(\"'\",\"\",str(colNameTuple)) mySQLCmd = \"USE %s; LOAD DATA", "'10, My Poem' :param colVals: list of column values destined", "host='127.0.0.1', port=3306, user='root', passwd='', db='mysql'): ''' :param host: MySQL host", "# 2. Redistributions in binary form must reproduce the above", "Neither the name of the copyright holder nor the names" ]
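# A minimal usage sketch for the MySQLDB wrapper above -- an illustration, not
# part of the original file. It assumes a reachable MySQL server at the
# defaults (127.0.0.1:3306) and a scratch database named 'unittest' (both
# hypothetical), and exercises only methods defined above.
if __name__ == '__main__':
    db = MySQLDB(user='root', passwd='', db='unittest')
    try:
        db.createTable('birthdays', {'name': 'VARCHAR(255)', 'year': 'INT'})
        db.insert('birthdays', {'name': 'Ada', 'year': 1815})
        # query() is a generator; each iteration fetches one row:
        for row in db.query('SELECT name, year FROM birthdays'):
            print row
    finally:
        db.close()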
[ "xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\",", "from lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version)", "xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain", "xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if", "print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex())", "print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick())", "import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth)", "xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex()) print(\"Key ID:\", xkey.keyid().hex())", "xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex())", "print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\",", "print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\",", "one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent", "xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex()) print(\"Key", "from lib.mbp32 import XKey from lib.utils import one_line_from_stdin xkey =", "#!./venv/bin/python from lib.mbp32 import XKey from lib.utils import one_line_from_stdin xkey", "lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\",", "<reponame>treys/crypto-key-derivation #!./venv/bin/python from lib.mbp32 import XKey from lib.utils import one_line_from_stdin", "lib.mbp32 import XKey from lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin())", "print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex()) print(\"Key ID:\", xkey.keyid().hex()) print(\"XKey:\",", "number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private", "code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public", "XKey from lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\",", "xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\",", "= XKey.from_xkey(one_line_from_stdin()) print(xkey) 
print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex())", "print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key) if xkey.key.get_private_bytes():", "if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex()) print(\"Key ID:\",", "print(\"Key:\", xkey.key) if xkey.key.get_private_bytes(): print(\"Private bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex())", "xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\",", "XKey.from_xkey(one_line_from_stdin()) print(xkey) print(\"Version:\", xkey.version) print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child", "FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\", xkey.chain_code.hex()) print(\"Key:\", xkey.key)", "bytes:\", xkey.key.get_private_bytes().hex()) print(\"Public bytes:\", xkey.key.get_public_bytes().hex()) print(\"Key ID:\", xkey.keyid().hex()) print(\"XKey:\", xkey.to_xkey().decode('ascii'))", "import XKey from lib.utils import one_line_from_stdin xkey = XKey.from_xkey(one_line_from_stdin()) print(xkey)", "print(\"Depth:\", xkey.depth) print(\"Parent FP:\", xkey.parent_fp.hex()) print(\"Child number:\", xkey.child_number_with_tick()) print(\"Chain code:\"," ]
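# Illustrative round-trip sketch (an addition, not part of the original
# script). It assumes `serialized` holds a valid Base58 BIP32 extended key and
# that XKey.from_xkey()/to_xkey() are lossless inverses, which the field dump
# above suggests; only API calls that already appear above are used.
def xkey_roundtrip(serialized):
    from lib.mbp32 import XKey
    xk = XKey.from_xkey(serialized)
    # Re-serialize and compare with the input to confirm nothing was lost.
    assert xk.to_xkey().decode('ascii') == serialized
    return xk.keyid().hex()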
[ "deg/pix (x) =', fov_x/dim) print(' deg/pix (y) =', fov_y/dim) ang_fov(sample_order)", "dimension of tangent image corners = tangent_image_corners(b, s) # Corners", "spherical_distortion.util import * sample_order = 9 # Input resolution to", "to examine def ang_fov(s): print('Spherical Resolution:', s) for b in", "print(' FOV (x) =', fov_x) print(' FOV (y) =', fov_y)", "s) # Corners of each tangent image fov_x, fov_y =", "# Corners of each tangent image fov_x, fov_y = compute_tangent_image_angular_resolution(corners)", "in range(s): dim = tangent_image_dim(b, s) # Pixel dimension of", "= tangent_image_corners(b, s) # Corners of each tangent image fov_x,", "dim = tangent_image_dim(b, s) # Pixel dimension of tangent image", "Corners of each tangent image fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print('", "base level', b) print(' FOV (x) =', fov_x) print(' FOV", "tangent_image_dim(b, s) # Pixel dimension of tangent image corners =", "= compute_tangent_image_angular_resolution(corners) print(' At base level', b) print(' FOV (x)", "b) print(' FOV (x) =', fov_x) print(' FOV (y) =',", "= tangent_image_dim(b, s) # Pixel dimension of tangent image corners", "from spherical_distortion.util import * sample_order = 9 # Input resolution", "import * sample_order = 9 # Input resolution to examine", "s) for b in range(s): dim = tangent_image_dim(b, s) #", "of each tangent image fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print(' At", "for b in range(s): dim = tangent_image_dim(b, s) # Pixel", "b in range(s): dim = tangent_image_dim(b, s) # Pixel dimension", "tangent image fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print(' At base level',", "tangent_image_corners(b, s) # Corners of each tangent image fov_x, fov_y", "=', fov_y) print(' deg/pix (x) =', fov_x/dim) print(' deg/pix (y)", "FOV (y) =', fov_y) print(' deg/pix (x) =', fov_x/dim) print('", "corners = tangent_image_corners(b, s) # Corners of each tangent image", "print(' At base level', b) print(' FOV (x) =', fov_x)", "(y) =', fov_y) print(' deg/pix (x) =', fov_x/dim) print(' deg/pix", "Pixel dimension of tangent image corners = tangent_image_corners(b, s) #", "each tangent image fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print(' At base", "print(' deg/pix (x) =', fov_x/dim) print(' deg/pix (y) =', fov_y/dim)", "tangent image corners = tangent_image_corners(b, s) # Corners of each", "sample_order = 9 # Input resolution to examine def ang_fov(s):", "range(s): dim = tangent_image_dim(b, s) # Pixel dimension of tangent", "fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print(' At base level', b) print('", "= 9 # Input resolution to examine def ang_fov(s): print('Spherical", "# Input resolution to examine def ang_fov(s): print('Spherical Resolution:', s)", "fov_x) print(' FOV (y) =', fov_y) print(' deg/pix (x) =',", "At base level', b) print(' FOV (x) =', fov_x) print('", "of tangent image corners = tangent_image_corners(b, s) # Corners of", "print('Spherical Resolution:', s) for b in range(s): dim = tangent_image_dim(b,", "Resolution:', s) for b in range(s): dim = tangent_image_dim(b, s)", "(x) =', fov_x) print(' FOV (y) =', fov_y) print(' deg/pix", "image fov_x, fov_y = compute_tangent_image_angular_resolution(corners) print(' At base level', b)", "print(' FOV (y) =', fov_y) print(' deg/pix (x) =', fov_x/dim)", "ang_fov(s): print('Spherical Resolution:', s) for b in range(s): dim =", "Input resolution to examine def ang_fov(s): print('Spherical 
Resolution:', s) for", "=', fov_x) print(' FOV (y) =', fov_y) print(' deg/pix (x)", "s) # Pixel dimension of tangent image corners = tangent_image_corners(b,", "# Pixel dimension of tangent image corners = tangent_image_corners(b, s)", "examine def ang_fov(s): print('Spherical Resolution:', s) for b in range(s):", "fov_y = compute_tangent_image_angular_resolution(corners) print(' At base level', b) print(' FOV", "level', b) print(' FOV (x) =', fov_x) print(' FOV (y)", "* sample_order = 9 # Input resolution to examine def", "9 # Input resolution to examine def ang_fov(s): print('Spherical Resolution:',", "compute_tangent_image_angular_resolution(corners) print(' At base level', b) print(' FOV (x) =',", "FOV (x) =', fov_x) print(' FOV (y) =', fov_y) print('", "fov_y) print(' deg/pix (x) =', fov_x/dim) print(' deg/pix (y) =',", "resolution to examine def ang_fov(s): print('Spherical Resolution:', s) for b", "def ang_fov(s): print('Spherical Resolution:', s) for b in range(s): dim", "image corners = tangent_image_corners(b, s) # Corners of each tangent" ]
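# A quick comparison sketch (illustrative): rerun the probe at a coarser input
# sampling order to see how the per-level FOV and deg/pix figures change when
# the spherical resolution is reduced.
ang_fov(7)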
[ "returns the functional hash of a particular node. The default", "to the graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield # Remove", "the node object associated with the node name. Parameters ----------", "graph : Node or None Parent graph of this node.", "index_list = self.args return var def set_name(self, name): \"\"\" Set", "(\"slice_op\", \"var_index\", \"index\")) else other.__rsub__(self) def __rsub__(self, other): return sub(other,", "@contextlib.contextmanager def control_dependencies(dependencies, graph=None): \"\"\" Ensure that all `dependencies` are", "isinstance(self.args[1], Node) else tuple([]) op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)", "\"\"\" tuple : Positional arguments which are used for executing", "is not an `Node` instance, node name, or a sequence", "dictionary of node names with their values. .. note:: This", "self @property def name(self): \"\"\"str : Unique name of the", "self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([]) op1 = np.asarray(list(map(lambda x:", "chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list) range_", "isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain else:", "not in self.nodes.keys(): raise KeyError(f\"{name} not in {self.name} keys:\\n\" f\"Node", "nodeop(operator.concat) contains = nodeop(operator.contains) countOf = nodeop(operator.countOf) delitem = nodeop(operator.delitem)", "\"\"\" return self._graph def preds(self): return self._preds def succs(self): return", "\"var_index\", \"index\")) else other.__sub__(self) def __pow__(self, other): return pow_(self, other,", "dict Keyword arguments passed to the `_evaluate` method. \"\"\" _graph_stack", "else: for idx, d in enumerate(self.domain.dom_set): if shape and isinstance(shape[idx],", "return self.target(*args, **kwargs) def __call__(self, *args, **kwargs): return call(self, *args,", "inv(self, graph=self.graph) def __neg__(self): return neg(self, graph=self.graph) def __abs__(self): return", "op_name=f\"slice_{target.__name__}\", **kwargs) self.target = target @property def domain(self): return self.kwargs[\"domain\"]", "Node or str Node instance or name of an node.", "slice_op(operator.lshift, other, self, graph=self.graph) def __rshift__(self, other): return slice_op(operator.rshift, self,", "{self.shape}\\n\" f\"Key: {key}\") name = f\"{self.name}{key}\" if name not in", "(func_op, Integral)): s.append(shape[idx]) elif shape and isinstance(shape[idx], float): s.append(int(shape[idx])) elif", "target, *args, **kwargs): kwargs[\"op_name\"] = kwargs[\"op_name\"] if \"op_name\" in kwargs", "node is evaluated. \"\"\" for node in self.dependencies: node.evaluate(context, callback)", "== (1,) and len(indices) == 1: res = res[0] self.domain.set_computed(out_shape,", "Context in which to evaluate the nodes. 
callback : callable", "= nodeop(builtins.format) len_ = nodeop(builtins.len) property_ = nodeop(builtins.property) type_ =", "nodes given a dictionary of node names with their values.", "disable=W0235 if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"],", "elif isinstance(shape, Integral): self._shape = tuple([shape]) elif isinstance(shape, Node): self._shape", "isinstance(fetches, Sequence): single = False else: raise ValueError(\"`fetches` must be", "object] Context whose keys are node instances or names. kwargs", "isinstance(node, Node): Node._eval_stack.append(node.name) return node.evaluate(context, **kwargs) partial = functools.partial(cls.evaluate_node, context=context,", "== \"Node\": self.nodes[arg.name] = self.graph[arg.name] new_args.append(arg) self._args = tuple(new_args) @shape.setter", "def name(self, name): self.set_name(name) @args.setter def args(self, args): new_args =", "should implement this function to evaluate the node. \"\"\" return", "or _is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain else: slice1_idx = Domain(tuple([]))", "Domain(tuple([])) if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx =", "bin_ = nodeop(builtins.bin) eval_ = nodeop(builtins.eval) int_ = nodeop(builtins.int) open_", "% (self.name, self.args) class slice_op(Node): \"\"\" Node representing multi-dimensional operations", "{self} with variable shape {self.var.shape}\") return True else: return self.var.shape", "has a default value to use for execution, it can", "arguments passed to the target \"\"\" def __init__(self, target, *args,", "def __rmul__(self, other): return slice_op(operator.mul, other, self, graph=self.graph) def __truediv__(self,", "interactive and not fname.startswith('<'): break # pragma: no cover interactive", "import uuid import numpy as np import importlib from .graph", "= [node] for n in node: stack = [] if", "def __rrshift__(self, other): return rshift(other, self, graph=self.graph) if not _is_node_type_instance(other,", "any_ = nodeop(builtins.any) divmod_ = nodeop(builtins.divmod) id_ = nodeop(builtins.id) object_", "op_name : str Operation name which describes the node functionality.", "graph=self.graph) def __rmatmul__(self, other): return matmul(other, self, graph=self.graph) def __mul__(self,", "else a for a in self.args] @property def shape(self): \"\"\"", "names with node instances. .. 
note:: This function modifies the", "pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "fname = frame.filename if 'polymath' in fname: continue # Stop", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__xor__(self) def __lt__(self, other): return", "\"var_index\", \"index\")) else other.__rfloordiv__(self) def __rfloordiv__(self, other): return floordiv(other, self,", "slice_op(operator.floordiv, self, other, graph=self.graph) def __rfloordiv__(self, other): return slice_op(operator.floordiv, other,", "nodeop(builtins.help) min_ = nodeop(builtins.min) setattr_ = nodeop(builtins.setattr) all_ = nodeop(builtins.all)", "[] if isinstance(key, Node): name.append(key.name) elif hasattr(key, \"__len__\") and not", "= self.args return var def set_name(self, name): \"\"\" Set the", "be a mapping.\") nodes = list(context) # Add the keyword", "return slice_op(operator.and_, other, self, graph=self.graph) def __or__(self, other): return slice_op(operator.or_,", "self._shape = tuple(s) def is_scalar(self, val): return not isinstance(val, np.ndarray)", "self : Node This node. Raises ------ ValueError If an", "@nodeop def call(func, *args, **kwargs): \"\"\" Call `func` with positional", "self == Node._graph_stack.pop() def __repr__(self): return \"<node '%s'>\" % self.name", "passed to `func`. kwargs : dict Mapping of keyword arguments", "def preds(self): return self._preds def succs(self): return self._preds def add_predecessor(self,", "func_op(Node): # pylint: disable=C0103,R0903 \"\"\" Node wrapper for stateless functions.", "else: shapes = [] for dim in shape: if isinstance(dim,", "or None Parent graph of this node. If graph is", "shape: {self.var.shape}\\n\" f\"\\tIndex Upper bounds: {indices[-1]}\") indices = list(map(lambda x:", ": dict Additional context information keyed by variable name. Returns", "integer values or index/index_op nodes. \"\"\" def __init__(self, target, *args,", "_is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain elif _is_node_type_instance(slice1_var, \"index\"): slice1_idx =", "other, graph=self.graph) def __repr__(self): return \"<var_index name=%s, index=%s>\" % (self.name,", "element in node) if isinstance(node, list): return [partial(element) for element", "**kwargs) @classmethod def evaluate_node(cls, node, context, **kwargs): \"\"\" Evaluate an", "deque import functools from numbers import Integral, Rational, Real import", "__rmul__(self, other): return mul(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret = self.nodes.item_by_index(idx) return ret", "return ret else: name = [] if isinstance(key, Node): name.append(key.name)", "type_ = nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_", "= nodeop(builtins.open) str_ = nodeop(builtins.str) bool_ = nodeop(builtins.bool) exec_ =", "= deque([None]) _eval_stack = [] stack_size = 5 evaluated_nodes =", "single and not all([(idx_val - 1) >= indices[-1][idx] for idx,", "*args, **kwargs): \"\"\" Call `func` with positional arguments `args` and", "graph or Node._graph_stack[-1] return graph def instantiate_node(self, node): # pylint:disable=W0621", "def __mul__(self, other): return mul(self, other, graph=self.graph) if not _is_node_type_instance(other,", "node name. 
Parameters ---------- node : Node or str Node", "the node or `None` to use a random, unique identifier.", "slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self): return \"<var_index name=%s, index=%s>\"", "var_index(self, key, graph=self) elif isinstance(key, tuple): return var_index(self, list(key), graph=self)", "arg in self.args] kwargs = {key: partial(value) for key, value", "arg1_dom = self.args[1].domain op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else", "name which describes the node functionality. \"\"\" return self._op_name @op_name.setter", "other.__mul__(self) def __truediv__(self, other): return truediv(self, other, graph=self.graph) if not", "context = {} elif not isinstance(context, Mapping): raise ValueError(\"`context` must", "name): \"\"\" Set the name of the node and update", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__and__(self) def __or__(self,", "else (k, self.nodes[k]), self.nodes.keys())) self.nodes = Graph(n) def insert_node(self, node,", "__truediv__(self, other): return slice_op(operator.truediv, self, other, graph=self.graph) def __rtruediv__(self, other):", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rxor__(self)", "def domain(self): return self.kwargs[\"domain\"] @property def var(self): var, index_list =", "from functions. \"\"\" # This is called when the decorator", "private variable self.kwargs = kwargs self.graph = graph self._shape =", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rlshift__(self)", "a graph by replacing all node names with node instances.", "nodeop(builtins.min) setattr_ = nodeop(builtins.setattr) all_ = nodeop(builtins.all) dir_ = nodeop(builtins.dir)", "other.__rshift__(self) def __and__(self, other): return and_(self, other, graph=self.graph) if not", "val is not None and (not isinstance(val, np.ndarray) or (len(val.shape)", "tuple or None Shape of the output for a node.", "compile_ = nodeop(builtins.compile) globals_ = nodeop(builtins.globals) map_ = nodeop(builtins.map) reversed_", "isinstance(shape, Integral): self._shape = tuple([shape]) elif isinstance(shape, Node): self._shape =", "def __getitem__(self, key): if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape): if", "@property def name(self): \"\"\"str : Unique name of the node\"\"\"", "variable shape:\\n\" f\"\\tArgs: {self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\"", "graph. 
Parameters ---------- graph : Node or None Graph to", "Evaluate the dependencies of this node and discard the values.", "c in context: if c in fetches and c.op_name in", "def set_name(self, name): \"\"\" Set the name of the node", "op1, op2, context=None, **kwargs): if self.is_scalar(op1) or self.is_scalar(op2): value =", "op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape) op2 = np.asarray(list(map(lambda x:", "for i in c.name.split(\"/\")]) if c.write_count > 0 else c.name", "def __setstate__(self, data): self.__dict__.update(data) def set_name(self, name): \"\"\" Set the", "for attr in ['start', 'stop', 'step']]) return node except Exception", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__floordiv__(self) def __mod__(self,", "a for a in self.args] @property def shape(self): \"\"\" tuple", "shape for {self.name}:\\n\" f\"Shape: {self.shape}\\n\" f\"Key: {key}\") name = f\"{self.name}{key}\"", "__iter__(self): num = len(self) for i in range(num): yield self[i]", "other): return lt(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "using `value`. kwargs : dict Keyword arguments passed to the", "graph is `None`, this is the top-level graph. op_name :", "evaluated_nodes = 0 def __init__(self, *args, name=None, shape=None, graph=None, dependencies=None,", "self._name @property def op_name(self): \"\"\" str : Operation name which", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__truediv__(self) def", "not node.is_shape_finalized(): context[node] = node.evaluate(context) return context def run(self, fetches,", "update_graph_key(self, old_key, new_key): n = list(map(lambda k: (new_key, self.nodes[k]) if", "Output of the nodes given the context. Raises ------ ValueError", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rfloordiv__(self) def __rfloordiv__(self,", "slice_ = nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_ = nodeop(builtins.divmod) id_", "to the variable, index combination. Parameters ---------- value : str", "self._predecessors.append(pred) def add_successor(self, succ): if isinstance(succ, Node): self._succesors.append(succ.gname) else: self._succesors.append(succ)", "context: return context[self] # Evaluate the parents partial = functools.partial(self.evaluate_node,", "instance or an node name. RuntimeError If `node` is an", "return self(*args, context, **kwargs) @classmethod def evaluate_node(cls, node, context, **kwargs):", "return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "succs(self): return self._preds def add_predecessor(self, pred): if isinstance(pred, Node): self._predecessors.append(pred.gname)", "other, self, graph=self.graph) def __truediv__(self, other): return slice_op(operator.truediv, self, other,", "uuid import numpy as np import importlib from .graph import", "= tuple(s) def is_scalar(self, val): return not isinstance(val, np.ndarray) or", "instances or names. kwargs : dict[str, object] Additional context information", "(list)): return var_index(self, key, graph=self) elif isinstance(key, tuple): return var_index(self,", ": tuple Positional arguments passed to the `_evaluate` method. name", "for executing this node. 
\"\"\" return tuple(self._args) @property def argnames(self):", "self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__sub__(self)", "len(indices) == 1: res = res[0] self.domain.set_computed(out_shape, indices) return res", "creating nodes from functions. \"\"\" # This is called when", "in order to enable \"is shape finalized\" to work self._shape", "or node name\") def instantiate_graph(self, context, **kwargs): \"\"\" Instantiate a", "\" \"sequence thereof.\") fetches = [self.instantiate_node(node) for node in fetches]", "init_from_args(cls, *args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs): if", "or (len(val.shape) == 1 and val.shape[0] == 1) def _evaluate(self,", "traceback.extract_stack(limit=1) @property def graph(self): \"\"\" polymath.srdfg.graph.Graph : Parent graph of", "returning the explicitly given graph or using the default graph.", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rmod__(self) def __rmod__(self,", "\"]\" if name in self.graph.nodes: return self.graph.nodes[name] elif isinstance(key, (list)):", "other): return xor(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "\"\"\" return func(*args, **kwargs) @contextlib.contextmanager def control_dependencies(dependencies, graph=None): \"\"\" Ensure", "def __init__(self, var, idx, name=None, **kwargs): # pylint: disable=W0235 if", "name return self def __getitem__(self, key): if self.is_shape_finalized() and len(self.nodes)", "Graph to return or `None` to use the default graph.", "raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\\n\\t\" f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\")", "node name, or a \" \"sequence thereof.\") fetches = [self.instantiate_node(node)", "return node raise ValueError(f\"'{node}' is not an `Node` instance or", "slice_op(operator.gt, self, other, graph=self.graph) def __ge__(self, other): return slice_op(operator.ge, self,", "name '{name}' in {self.graph.name}:\\n\\t\" f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\") if self.graph:", "are executed before any nodes in this scope. Parameters ----------", "self.__class__.__name__ @name.setter def name(self, name): self.set_name(name) @args.setter def args(self, args):", "f\"{fnc.__name__}\" self.kwargs[\"target\"] = f\"{fnc.__module__}.{fnc.__name__}\" def __getitem__(self, key): return self @property", "(\"slice_op\", \"var_index\", \"index\")) else other.__radd__(self) def __radd__(self, other): return add(other,", "False for s in self.shape: if not isinstance(s, Integral): return", "default value to use for execution, it can be set", "functionality. \"\"\" return self._op_name @op_name.setter def op_name(self, op_name): if op_name:", "other): return matmul(other, self, graph=self.graph) def __mul__(self, other): return mul(self,", "x: op2[x], op2_idx))).reshape(self.domain.computed_shape) value = self.target(op1, op2) return value def", "def __rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph) def __floordiv__(self,", "'%s' target=%s args=<%d items>>\" % \\ (self.name, self.kwargs[\"target\"], len(self.args)) def", "this scope. 
Parameters ---------- dependencies : list Sequence of nodes", "nodeop(builtins.hex) next_ = nodeop(builtins.next) slice_ = nodeop(builtins.slice) any_ = nodeop(builtins.any)", "(slice_op, var_index)) or _is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx = slice2_var.domain else: slice2_idx", "= value self.dependencies = [] self._args = [] self._predeecessors =", "= (1,) indices = (0,) single = True else: out_shape", "context=context, callback=callback) args = [partial(arg) for arg in self.args] kwargs", "def args(self, args): new_args = [] for arg in args:", "def __and__(self, other): return and_(self, other, graph=self.graph) if not _is_node_type_instance(other,", "other): return xor(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "\"index\")) else other.__add__(self) def __sub__(self, other): return sub(self, other, graph=self.graph)", "import Integral, Rational, Real import contextlib import traceback import uuid", "matmul(other, self, graph=self.graph) def __mul__(self, other): return mul(self, other, graph=self.graph)", "def is_shape_finalized(self): if self.shape == UNSET_SHAPE: return False for s", "the node callback = callback or _noop_callback with callback(self, context):", "other, self, graph=self.graph) def __rshift__(self, other): return slice_op(operator.rshift, self, other,", "= nodeop(operator.floordiv) ge = nodeop(operator.ge) getitem = nodeop(operator.getitem) gt =", "kwargs[\"op_name\"] = kwargs[\"op_name\"] if \"op_name\" in kwargs \\ else f\"{target.__name__}\"", "node and add the node to the graph self._name =", "= Domain(idx) super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs) @property def", "replicate the new name with a unique stringwhich corresponds to", ": dict keywoard arguments passed to the target \"\"\" def", "self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) else: domain = Domain(tuple([])) self._target", "nodeop(operator.floordiv) ge = nodeop(operator.ge) getitem = nodeop(operator.getitem) gt = nodeop(operator.gt)", "Unique name of the node. 
Returns ------- self : Node", "for node '{node}'\") context[node] = value if node.op_name in [\"placeholder\",", "list) else kwargs.pop(\"domain\") else: all_args = _flatten_iterable(args) slice1_var, slice1_idx, slice2_var,", "self.nodes[name] return ret else: name = [] if isinstance(key, Node):", "name=name, graph=self.graph) def set_shape(self, shape=None, init=False): s = [] assert", "instead belongs to\" f\" {node.graph}\") return node raise ValueError(f\"'{node}' is", "= nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ =", "this function returns a hash of all attributes and subgraphs", "self, graph=self.graph) def __pow__(self, other): return slice_op(builtins.pow, self, other, graph=self.graph)", "slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) else:", "args = [partial(arg) for arg in self.args] kwargs = {key:", "def __pow__(self, other): return slice_op(builtins.pow, self, other, graph=self.graph) def __rpow__(self,", "val=None): if val is not None and (not isinstance(val, np.ndarray)", "!= len(out_shape) and np.prod(var.shape) != np.prod(out_shape): raise ValueError(f\"Index list does", "nodeop(builtins.ascii) enumerate_ = nodeop(builtins.enumerate) input_ = nodeop(builtins.input) oct_ = nodeop(builtins.oct)", "**kwargs) else: context[self] = self.value = self._evaluate(*args, **kwargs) return self.value", "\"var_index\", \"index\")) else other.__rand__(self) def __rand__(self, other): return and_(other, self,", "= nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ = nodeop(builtins.locals) repr_ =", "\"Node\": self._op_name = self.name else: self._op_name = self.__class__.__name__ @name.setter def", "identifier. shape : tuple or None Shape of the output", "evaluated. Returns ------- value : object Output of the node", "np.prod(out_shape): if len(out_shape) > len(var.shape): for i in range(len(out_shape)): if", "slice_op(operator.mod, self, other, graph=self.graph) def __rmod__(self, other): return slice_op(operator.mod, other,", "if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) ==", "self.target(op1, op2) else: arg0_dom = self.args[0].domain arg1_dom = self.args[1].domain op1_idx", "nodeop(builtins.enumerate) input_ = nodeop(builtins.input) oct_ = nodeop(builtins.oct) staticmethod_ = nodeop(builtins.staticmethod)", "frame in reversed(n._stack): # pylint: disable=protected-access # Do not capture", "domain = tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"], list) else kwargs.pop(\"domain\") else: all_args", "*args, **kwargs): kwargs[\"op_name\"] = kwargs[\"op_name\"] if \"op_name\" in kwargs \\", "slice2_var, slice2_idx def _evaluate(self, *args, **kwargs): for aa in list(kwargs.keys()):", "other): return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "to avoid the context being modified. 
Parameters ---------- context :", "def _evaluate(self, var, indices, **kwargs): if self.is_scalar(var): out_shape = (1,)", "self.add_predecessor(e) if isinstance(e, Node): e.add_successor(self) @property def domain(self): return Domain(tuple([]))", "context, **kwargs) @classmethod def evaluate_node(cls, node, context, **kwargs): \"\"\" Evaluate", "__getattr__(self, name): return getattr_(self, name, graph=self.graph) def __getitem__(self, key): if", "other): return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rrshift__(self) def", "@property def var(self): return self @property def name(self): \"\"\"str :", "!= len(self.shape): raise KeyError(f\"Invalid key shape for {self.name}:\\n\" f\"Shape: {self.shape}\\n\"", "ValueError(\"`context` must be a mapping.\") nodes = list(context) # Add", "traceback import uuid import numpy as np import importlib from", "traces fname = frame.filename if 'polymath' in fname: continue #", "to the target kwargs : dict keywoard arguments passed to", "the decorator is used with arguments if target is None:", "name(self, name): self.set_name(name) @args.setter def args(self, args): new_args = []", "[\"output\", \"state\", \"temp\"]: write_name = \"/\".join([f\"{i}{c.write_count-1}\" for i in c.name.split(\"/\")])", "Parameters ---------- value : str Unique name of the node.", "other.__radd__(self) def __radd__(self, other): return add(other, self, graph=self.graph) if not", "*args, **kwargs): return call(self, *args, **kwargs) def __repr__(self): return \"<func_op", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__ne__(self) def __gt__(self, other): return", "isinstance(shape, Node): self._shape = tuple([shape]) elif not shape or len(shape)", "`func` with positional arguments `args` and keyword arguments `kwargs`. Parameters", "`%s` defined at:\\n\\n%s\" % (n, stack) messages.append(message) raise ex from", "def __rfloordiv__(self, other): return slice_op(operator.floordiv, other, self, graph=self.graph) def __mod__(self,", "if isinstance(kwargs[\"domain\"], list) else kwargs.pop(\"domain\") elif len(args) == 2: all_args", "self.name) class func_op(Node): # pylint: disable=C0103,R0903 \"\"\" Node wrapper for", "+= 1 try: if isinstance(node, Node): Node._eval_stack.append(node.name) return node.evaluate(context, **kwargs)", "(\"slice_op\", \"var_index\", \"index\")) else other.__mod__(self) def __lshift__(self, other): return lshift(self,", "self.target = target @property def domain(self): return self.kwargs[\"domain\"] def __getitem__(self,", "nodeop(operator.getitem) gt = nodeop(operator.gt) index = nodeop(operator.index) indexOf = nodeop(operator.indexOf)", "self.instantiate_node(node) if node in context: raise ValueError(f\"duplicate unequal value for", "for execution, it can be set using `value`. kwargs :", "or None Context in which to evaluate the nodes. callback", "else other.__sub__(self) def __pow__(self, other): return pow_(self, other, graph=self.graph) if", "values or index/index_op nodes. \"\"\" def __init__(self, var, idx, name=None,", "the graph. Parameters ---------- value : str Unique name of", "# pragma: no cover interactive = fname.startswith('<') stack.append(frame) stack =", "arguments `args` and keyword arguments `kwargs`. 
Parameters ---------- func :", "object_ = nodeop(builtins.object) sorted_ = nodeop(builtins.sorted) ascii_ = nodeop(builtins.ascii) enumerate_", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__truediv__(self)", "values. Parameters ---------- var : Node The multi-dimensional variable used", "this scope. \"\"\" # Add dependencies to the graph graph", "all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]):", "not in g.nodes: g = g.graph if name in g.nodes:", "other): return ne(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "self._shape = OrderedDict() self.shape = shape or tuple([]) # Get", "if isinstance(pred, Node): self._predecessors.append(pred.gname) else: self._predecessors.append(pred) def add_successor(self, succ): if", "pos = nodeop(operator.pos) rshift = nodeop(operator.rshift) setitem = nodeop(operator.setitem) sub", "Additional context information keyed by variable name. Returns ------- values", "any nodes defined in this scope. \"\"\" # Add dependencies", "= nodeop(builtins.dict) help_ = nodeop(builtins.help) min_ = nodeop(builtins.min) setattr_ =", "isinstance(dim, float): shapes.append(int(dim)) else: raise TypeError(f\"Shape value must be placeholder", "= name return self def __getitem__(self, key): if self.is_shape_finalized() and", "slice1_idx, slice2_var, slice2_idx def _evaluate(self, *args, **kwargs): for aa in", "not match {var.shape} in {self.var.name} - {self.var.op_name}\" f\"dimensions for slice", "nodeop(builtins.object) sorted_ = nodeop(builtins.sorted) ascii_ = nodeop(builtins.ascii) enumerate_ = nodeop(builtins.enumerate)", "nodeop(operator.ior) ipow = nodeop(operator.ipow) irshift = nodeop(operator.irshift) is_ = nodeop(operator.is_)", "normalized_context : dict[Node, object] Normalized context whose keys are node", "value for node '{node}'\") context[node] = value if node.op_name in", "of this node and discard the values. 
Parameters ---------- context", "name, or a \" \"sequence thereof.\") fetches = [self.instantiate_node(node) for", "= nodeop(operator.ne) neg = nodeop(operator.neg) not_ = nodeop(operator.not_) or_ =", "self.value = self._evaluate(*args, context=context, **kwargs) else: context[self] = self.value =", "continue # Stop tracing at the last interactive cell if", "node has a default value to use for execution, it", "self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__lshift__(self)", ": tuple positional arguments passed to the target kwargs :", "False if isinstance(node, Node) or not is_iterable(node): node = [node]", "other.__truediv__(self) def __rtruediv__(self, other): return truediv(other, self, graph=self.graph) if not", "kwargs.pop(\"domain\") else: all_args = _flatten_iterable(args) slice1_var, slice1_idx, slice2_var, slice2_idx =", "**kwargs) if isinstance(node, tuple): return tuple(partial(element) for element in node)", "and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] = self", "__and__(self, other): return and_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "list(key), graph=self) else: return var_index(self, [key], graph=self) else: return self.nodes[key]", "def _evaluate(self, op1, op2, context=None, **kwargs): if self.is_scalar(op1) or self.is_scalar(op2):", "None If a node has a default value to use", "keys:\\n\" f\"Node keys: {list(self.nodes.keys())}\") ret = self.nodes[name] return ret else:", "If `node` is not an `Node` instance or an node", "if isinstance(self.args[1], Node) else tuple([]) op1 = np.asarray(list(map(lambda x: op1[x],", "[\"placeholder\", \"state\", \"input\", \"output\", \"temp\"] and not node.is_shape_finalized(): context[node] =", "[] if dependencies is None else dependencies if self.graph: self.dependencies.extend(self.graph.dependencies)", "ValueError If the context specifies more than one value for", "def _wrapper(*args, **kwargs_inner): return func_op(target, *args, **kwargs_inner, **kwargs) return _wrapper", "else other.__rmul__(self) def __rmul__(self, other): return mul(other, self, graph=self.graph) if", "functions. Parameters ---------- target : callable function to evaluate the", "graph.update_graph_key(self._name, name) else: graph.nodes[name] = self self._name = name return", "nodeop(operator.length_hint) lshift = nodeop(operator.lshift) lt = nodeop(operator.lt) matmul = nodeop(operator.matmul)", "node name\") def instantiate_graph(self, context, **kwargs): \"\"\" Instantiate a graph", "in enumerate(self.domain.dom_set): if shape and isinstance(shape[idx], (func_op, Integral)): s.append(shape[idx]) elif", "return \"<func_op '%s' target=%s args=<%d items>>\" % \\ (self.name, self.kwargs[\"target\"],", "not isinstance(context, Mapping): raise ValueError(\"`context` must be a mapping.\") nodes", "node.name not in self.nodes and (node.graph != self): raise RuntimeError(f\"node", "= np.expand_dims(var, axis=i) else: var = np.squeeze(var) if len(var.shape) !=", "= tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret = self.nodes.item_by_index(idx)", "range(len(out_shape)): if out_shape[i] == 1: var = np.expand_dims(var, axis=i) else:", "Operation name which describes the node functionality. 
\"\"\" return self._op_name", "other): return slice_op(operator.lt, other, self, graph=self.graph) def __ne__(self, other): return", "def __truediv__(self, other): return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other,", "g = g.graph if name in g.nodes: return g.nodes[name] raise", "(str, Node)): fetches = [fetches] single = True elif isinstance(fetches,", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__truediv__(self) def __rtruediv__(self, other):", "self.op_name = f\"{fnc.__name__}\" self.kwargs[\"target\"] = f\"{fnc.__module__}.{fnc.__name__}\" def __getitem__(self, key): return", "Node One or more `Node` instances or names to evaluate.", "0 def __init__(self, *args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None,", ": dict[Node, object] Normalized context whose keys are node instances.", "functools.partial(cls.evaluate_node, context=context, **kwargs) if isinstance(node, tuple): return tuple(partial(element) for element", "_is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx = slice2_var.domain elif _is_node_type_instance(slice2_var, \"index\"): slice2_idx =", "_noop_callback, _flatten_iterable, node_hash, \\ _is_node_type_instance, is_iterable class Node(object): \"\"\" Base", "= nodeop(builtins.eval) int_ = nodeop(builtins.int) open_ = nodeop(builtins.open) str_ =", "or str, object] Context whose keys are node instances or", "other.__rpow__(self) def __matmul__(self, other): return matmul(self, other, graph=self.graph) def __rmatmul__(self,", "shape : tuple or None Shape of the output for", "matmul(self, other, graph=self.graph) def __rmatmul__(self, other): return matmul(other, self, graph=self.graph)", "\"index\")) else other.__ge__(self) def __ne__(self, other): return ne(self, other, graph=self.graph)", "f\"Shape: {self.shape}\\n\" f\"Key: {key}\") name = f\"{self.name}{key}\" if name not", "= nodeop(builtins.hex) next_ = nodeop(builtins.next) slice_ = nodeop(builtins.slice) any_ =", "= slice2_var.domain else: slice2_idx = Domain(tuple([])) return slice1_var, slice1_idx, slice2_var,", "`None`, this is the top-level graph. \"\"\" return self._graph def", "shape var for var index {self} with variable shape {self.var.shape}\")", "nodeop(builtins.tuple) callable_ = nodeop(builtins.callable) format_ = nodeop(builtins.format) len_ = nodeop(builtins.len)", "op2[x], op2_idx))).reshape(self.domain.computed_shape) value = self.target(op1, op2) return value def get_index_nodes(self,", "value in self.kwargs.items() if key not in self.added_attrs} # Evaluate", "tuple of integers or parameter node names. graph : Node", "# Choose a name for the node and add the", "in place. Use :code:`context=context.copy()` to avoid the context being modified.", "context : dict[Node or str, object] Context whose keys are", "__sub__(self, other): return slice_op(operator.sub, self, other, graph=self.graph) def __rsub__(self, other):", "nodeop(builtins.map) reversed_ = nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr)", "len_ = nodeop(builtins.len) property_ = nodeop(builtins.property) type_ = nodeop(builtins.type) chr_", "x: op1[x], op1_idx))).reshape(self.domain.computed_shape) op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape) value", "to evaluate the node. 
\"\"\" return self(*args, context, **kwargs) @classmethod", "__getitem__(self, key): if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape): if isinstance(key,", "c.op_name in [\"output\", \"state\", \"temp\"]: write_name = \"/\".join([f\"{i}{c.write_count-1}\" for i", "graph by replacing all node names with node instances. ..", "node instances. .. note:: This function modifies the context in", "an `Node` instance, node name, or a sequence thereof. \"\"\"", "yield self[i] def __eq__(self, other): return hash(self) == hash(other) def", "graph instance by returning the explicitly given graph or using", "for key, value in node.items()} if isinstance(node, slice): return slice(*[partial(getattr(node,", "shape]) and len(self.domain) == np.product(shape) and len(shape) > 0: self._shape", "op2) else: arg0_dom = self.args[0].domain arg1_dom = self.args[1].domain op1_idx =", "if len(args) == 0: n = cls(name=name, shape=shape, graph=graph, op_name=op_name,", "domain = tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"], list) else kwargs.pop(\"domain\") else: domain", "are used for executing this node. \"\"\" return tuple(self._args) @property", "passed to the target kwargs : dict keywoard arguments passed", "property_ = nodeop(builtins.property) type_ = nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_", "list_ = nodeop(builtins.list) range_ = nodeop(builtins.range) vars_ = nodeop(builtins.vars) classmethod_", "other): return slice_op(operator.or_, other, self, graph=self.graph) def __xor__(self, other): return", "value : Any or None If a node has a", "# pragma: no cover messages = [] interactive = False", "context=context, **kwargs) else: context[self] = self.value = self._evaluate(*args, **kwargs) return", "name = [] if isinstance(key, Node): name.append(key.name) elif hasattr(key, \"__len__\")", "variable corresponding to input index values. Parameters ---------- var :", "node. Returns ------- self : Node This node. Raises ------", "Callback to be evaluated when an node is evaluated. \"\"\"", "__neg__(self): return neg(self, graph=self.graph) def __abs__(self): return abs_(self, graph=self.graph) def", "greater than the variable shape:\\n\" f\"\\tArgs: {self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\"", "np.expand_dims(var, axis=i) else: var = np.squeeze(var) if len(var.shape) != len(out_shape)", "enumerate(var.shape)]): raise ValueError(f\"var_index {self.name} has indices which are greater than", "self.nodes[key] def __add__(self, other): return add(self, other, graph=self.graph) if not", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__xor__(self) def __lt__(self,", "__init__(self, var, idx, name=None, **kwargs): # pylint: disable=W0235 if \"domain\"", "for arg in args: if isinstance(arg, Node): if self.__class__.__name__ ==", "\"sequence thereof.\") fetches = [self.instantiate_node(node) for node in fetches] context", "the name for a variable index, making sure to replicate", "op_name=op_name, dependencies=dependencies, value=value, **kwargs) else: n = cls(*args, name=name, shape=shape,", "= nodeop(operator.lshift) lt = nodeop(operator.lt) matmul = nodeop(operator.matmul) methodcaller =", "return reversed_(self, graph=self.graph) def update_graph_key(self, old_key, new_key): n = list(map(lambda", "Mapping of keyword arguments passed to `func`. 
\"\"\" return func(*args,", "__add__(self, other): return add(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "None self.op_name = op_name # Get the stack context so", "Positional arguments passed to the `_evaluate` method. name : str", "nodeop(builtins.pow) super_ = nodeop(builtins.super) bytes_ = nodeop(builtins.bytes) float_ = nodeop(builtins.float)", "= graph or Node._graph_stack[-1] return graph def instantiate_node(self, node): #", "dependencies of this node and discard the values. Parameters ----------", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__radd__(self) def", "target : callable function to evaluate the node args :", "Parameters ---------- target : cal The multi-dimensional variable used for", "in kwargs.items(): node = self.nodes[name] if node in context: raise", "dims=self.shape, order='C') ret = self.nodes.item_by_index(idx) return ret else: if isinstance(key,", "return getattr_(self, name, graph=self.graph) def __getitem__(self, key): if self.__class__.__name__ !=", "numbers import Integral, Rational, Real import contextlib import traceback import", "TODO: Need a way to check if the existing node", "exists in the associated graph. KeyError If the current name", "getitem(self, key, graph=self.graph) else: if isinstance(key, (list)): return var_index(self, key,", "\"temp\"]: write_name = \"/\".join([f\"{i}{c.write_count-1}\" for i in c.name.split(\"/\")]) if c.write_count", "{self.var.shape}\") return True else: return self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self,", "**kwargs): if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"],", "= nodeop(builtins.bytearray) filter_ = nodeop(builtins.filter) issubclass_ = nodeop(builtins.issubclass) pow_ =", "object] Additional context information keyed by variable name. Returns -------", "(node.graph != self): raise RuntimeError(f\"node '{node}' does not belong to", "__rand__(self, other): return and_(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", ": tuple or None Shape of the output for a", "nodes given the context. Raises ------ ValueError If `fetches` is", "return inv(self, graph=self.graph) def __neg__(self): return neg(self, graph=self.graph) def __abs__(self):", "_evaluate(self, var, indices, **kwargs): if self.is_scalar(var): out_shape = (1,) indices", "e in self.args: self.add_predecessor(e) if isinstance(e, Node): e.add_successor(self) @property def", "and not isinstance(key, str): for k in key: if isinstance(k,", "= Graph() self.value = value self.dependencies = [] self._args =", "idx, name=None, **kwargs): # pylint: disable=W0235 if \"domain\" in kwargs:", "the node and update the graph. Parameters ---------- value :", "(tuple, list)) if all([isinstance(sv, Integral) for sv in shape]) and", "elif hasattr(key, \"__len__\") and not isinstance(key, str): for k in", "from the graph del graph.dependencies[-len(dependencies):] #pylint: disable=C0103 abs_ = nodeop(builtins.abs)", "does not belong to {self} graph, instead belongs to\" f\"", "set using `value`. kwargs : dict Keyword arguments passed to", "OrderedDict, Mapping, Sequence, deque import functools from numbers import Integral,", "str Operation name which describes the node functionality. value :", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__lshift__(self) def __rshift__(self, other): return", "of the node and update the graph. 
Parameters ---------- value", "messages.append(message) raise ex from EvaluationError(\"\".join(messages)) @classmethod def init_from_args(cls, *args, name=None,", "def __pos__(self): return pos(self, graph=self.graph) def __reversed__(self): return reversed_(self, graph=self.graph)", "cal The multi-dimensional variable used for indexing into. idx :", "nodeop(builtins.dict) help_ = nodeop(builtins.help) min_ = nodeop(builtins.min) setattr_ = nodeop(builtins.setattr)", "def __bool__(self): return True def __hash__(self): return id(self) def func_hash(self):", "np.product(shape) and len(shape) > 0: self._shape = shape if isinstance(shape,", "to the `_evaluate` method. \"\"\" _graph_stack = deque([None]) _eval_stack =", "if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain", "\"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"], list) else", "= nodeop(operator.and_) attrgetter = nodeop(operator.attrgetter) concat = nodeop(operator.concat) contains =", "shape finalized\" to work self._shape = UNSET_SHAPE else: shapes =", "not isinstance(self.var.shape[0],Node)): raise ValueError(f\"Invalid shape var for var index {self}", "list or numpy array, and cannot be sliced for {self.name}\")", "elif not isinstance(context, Mapping): raise ValueError(\"`context` must be a mapping.\")", "isinstance(var, list): var = np.asarray(var) if len(var.shape) != len(out_shape) and", "names with their values. .. note:: This function modifies the", "Node representing multi-dimensional operations performed on a node. Parameters ----------", "`fetches` is not an `Node` instance, node name, or a", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__mod__(self) def __lshift__(self, other):", "op1_idx))).reshape(self.domain.computed_shape) op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape) value = self.target(op1,", "nodes from functions. \"\"\" # This is called when the", "nodes. 
Graph: {self.graph}\") def __len__(self): #TODO: Update this to check", "graph=self.graph) def __sub__(self, other): return slice_op(operator.sub, self, other, graph=self.graph) def", "'{node}'\") context[node] = value if node.op_name in [\"placeholder\", \"state\", \"input\",", "= nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_ = nodeop(builtins.divmod) id_ =", "import Graph from .domain import Domain from .util import _noop_callback,", "s.append(d) self._shape = tuple(s) def is_scalar(self, val): return not isinstance(val,", "def __rpow__(self, other): return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other,", "finalzied shape if self.shape == UNSET_SHAPE: raise TypeError(f'`shape` must be", "= slice1_var.domain elif _is_node_type_instance(slice1_var, \"index\"): slice1_idx = slice1_var.domain else: slice1_idx", "__le__(self, other): return le(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "callback(self, context): if self.__class__.__name__ == \"Node\": context[self] = self.value =", "tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"], list) else kwargs.pop(\"domain\") else: all_args = _flatten_iterable(args)", "f\"\\n\\t{self.kwargs} \") self._shape = tuple(shapes) @staticmethod def get_active_graph(graph=None): \"\"\" Obtain", "isinstance(key, (list)): ret = var_index(self.var, tuple(key), graph=self) elif isinstance(key, tuple):", "*args, context=None, **kwargs): \"\"\" Inheriting nodes should implement this function", "None: context = {} elif not isinstance(context, Mapping): raise ValueError(\"`context`", "in context: raise ValueError(f\"duplicate value for node '{node}'\") context[node] =", "\"index\")) else other.__rfloordiv__(self) def __rfloordiv__(self, other): return floordiv(other, self, graph=self.graph)", "_evaluate(self, *args, **kwargs): for aa in list(kwargs.keys()): if aa in", "a context. Parameters ---------- context : dict Normalised context in", "for node in fetches] context = self.instantiate_graph(context, **kwargs) for c", "try: if isinstance(node, Node): Node._eval_stack.append(node.name) return node.evaluate(context, **kwargs) partial =", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rand__(self) def __rand__(self,", "= nodeop(operator.sub) truediv = nodeop(operator.truediv) truth = nodeop(operator.truth) xor =", "{self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\" f\"\\tIndex Upper bounds:", "self._op_name @op_name.setter def op_name(self, op_name): if op_name: self._op_name = op_name", "\"\"\" Node wrapper for stateless functions. 
Parameters ---------- target :", "self.nodes = Graph() self.value = value self.dependencies = [] self._args", "= None self.name = name or uuid.uuid4().hex self._op_name = None", "if isinstance(node, Node): Node._eval_stack.append(node.name) return node.evaluate(context, **kwargs) partial = functools.partial(cls.evaluate_node,", "nodeop(operator.lshift) lt = nodeop(operator.lt) matmul = nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller)", "else tuple(shape) else: for idx, d in enumerate(self.domain.dom_set): if shape", "def __rlshift__(self, other): return lshift(other, self, graph=self.graph) if not _is_node_type_instance(other,", "else x, indices)) res = var[indices] if single else np.asarray([var[idx]", "tuple([shape]) elif isinstance(shape, Node): self._shape = tuple([shape]) elif not shape", ": Positional arguments which are used for executing this node.", "isinstance(a, Node) else a for a in self.args] @property def", "if target is None: return functools.partial(nodeop, **kwargs) # This is", "order='C') ret = self.nodes.item_by_index(idx) return ret else: if isinstance(key, (list)):", "context so we can report where the node was defined", "(\"slice_op\", \"var_index\", \"index\")) else other.__rmul__(self) def __rmul__(self, other): return mul(other,", "len(self.args)) def nodeop(target=None, **kwargs): \"\"\" Decorator for creating nodes from", "**kwargs) # This is called when the decorator is used", "if dependencies is None else dependencies if self.graph: self.dependencies.extend(self.graph.dependencies) #", "{dim}\" f\"\\n\\t{self.kwargs} \") self._shape = tuple(shapes) @staticmethod def get_active_graph(graph=None): \"\"\"", "isinstance(node, Node) or not is_iterable(node): node = [node] for n", "_graph_stack = deque([None]) _eval_stack = [] stack_size = 5 evaluated_nodes", "avoid the context being modified. Parameters ---------- fetches : list[str", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rrshift__(self) def __rrshift__(self,", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__add__(self) def __sub__(self, other):", "indices which are greater than the variable shape:\\n\" f\"\\tArgs: {self.args}\\n\"", "def __ne__(self, other): return ne(self, other, graph=self.graph) if not _is_node_type_instance(other,", "of all dependencies relevant to this node self.dependencies = []", "this is the top-level graph. op_name : str Operation name", "nodes should implement this function to evaluate the node. \"\"\"", "len(var.shape): for i in range(len(out_shape)): if out_shape[i] == 1: var", "context, **kwargs): \"\"\" Evaluate an node or constant given a", "kwargs : dict[str, object] Additional context information keyed by variable", "{Node._eval_stack}\") if not single and not all([(idx_val - 1) >=", "shape if self.shape == UNSET_SHAPE: raise TypeError(f'`shape` must be specified", "Graph(node_list) def __call__(self, *args, **kwargs): return self.run(*args, **kwargs) class EvaluationError(RuntimeError):", "the existing node is not equal to the current ndoe", "out_shape = (1,) indices = (0,) single = True else:", "nodeop(builtins.id) object_ = nodeop(builtins.object) sorted_ = nodeop(builtins.sorted) ascii_ = nodeop(builtins.ascii)", "function to evaluate the node. 
\"\"\" return self(*args, context, **kwargs)", "slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) if \"op_name\"", "kwargs: kwargs.pop(\"name\") self.added_attrs = [] # TODO: CHange this to", "shape=None, init=False): s = [] assert isinstance(shape, (tuple, list)) if", "== DEFAULT_SHAPES[0] def _evaluate(self, var, indices, **kwargs): if self.is_scalar(var): out_shape", "Sequence of nodes to be evaluted before evaluating any nodes", "g.nodes: g = g.graph if name in g.nodes: return g.nodes[name]", "is not None and name not in g.nodes: g =", "with type {type(var)} is not a list or numpy array,", "return True else: return self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self, var,", "super(slice_op, self).__init__(*args, target=target_name, domain=domain, op_name=f\"slice_{target.__name__}\", **kwargs) self.target = target @property", "slice_op(operator.or_, self, other, graph=self.graph) def __ror__(self, other): return slice_op(operator.or_, other,", "the current ndoe as ewll if self.graph and name in", "(\"slice_op\", \"var_index\", \"index\")) else other.__and__(self) def __or__(self, other): return or_(self,", "elif isinstance(d, float): s.append(int(d)) elif isinstance(d, var_index): s.append(d.domain) else: s.append(d)", "nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ = nodeop(builtins.locals)", "isinstance(s, Integral): return False return True def set_shape(self, shape=None, init=False):", ".domain import Domain from .util import _noop_callback, _flatten_iterable, node_hash, \\", "slice1_var.domain else: slice1_idx = Domain(tuple([])) if isinstance(slice2_var, (slice_op, var_index)) or", "None else dependencies if self.graph: self.dependencies.extend(self.graph.dependencies) # Choose a name", "other): return mod(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "= [] interactive = False if isinstance(node, Node) or not", "shape): self.set_shape(shape, init=True) @graph.setter def graph(self, graph): self._graph = Node.get_active_graph(graph)", "return mul(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "the graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield # Remove dependencies", "else other.__ne__(self) def __gt__(self, other): return gt(self, other, graph=self.graph) if", "where the node was defined self._stack = traceback.extract_stack(limit=1) @property def", "dependencies from the graph del graph.dependencies[-len(dependencies):] #pylint: disable=C0103 abs_ =", "other, self, graph=self.graph) def __pow__(self, other): return slice_op(builtins.pow, self, other,", "return slice_op(operator.mul, other, self, graph=self.graph) def __truediv__(self, other): return slice_op(operator.truediv,", "def __enter__(self): Node._graph_stack.append(self) return self def __exit__(self, *args): assert self", "if node in context: raise ValueError(f\"duplicate value for node '{node}'\")", "RuntimeError(f\"Cannot find {name} in graph nodes. Graph: {self.graph}\") def __len__(self):", "= nodeop(operator.lt) matmul = nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod =", "dependencies if self.graph: self.dependencies.extend(self.graph.dependencies) # Choose a name for the", "[key], graph=self) else: return self.nodes[key] def __add__(self, other): return add(self,", "`value`. 
kwargs : dict Keyword arguments passed to the `_evaluate`", "else other.__ge__(self) def __ne__(self, other): return ne(self, other, graph=self.graph) if", "return n def __bool__(self): return True def __hash__(self): return id(self)", "graph self._name = None self.name = name or uuid.uuid4().hex self._op_name", "len(out_shape) and np.prod(var.shape) == np.prod(out_shape): if len(out_shape) > len(var.shape): for", "graph. \"\"\" return self._graph def preds(self): return self._preds def succs(self):", "slice2_idx = Domain(tuple([])) return slice1_var, slice1_idx, slice2_var, slice2_idx def _evaluate(self,", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rtruediv__(self) def __floordiv__(self, other):", "def var(self): var, index_list = self.args return var def set_name(self,", "the context. \"\"\" # Evaluate all explicit dependencies first self.evaluate_dependencies(context,", "isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0: return self", "self._predeecessors = [] self._succesors = [] self.args = args if", "other): return slice_op(operator.lshift, other, self, graph=self.graph) def __rshift__(self, other): return", "irshift = nodeop(operator.irshift) is_ = nodeop(operator.is_) is_not = nodeop(operator.is_not) itemgetter", "# Remove dependencies from the graph del graph.dependencies[-len(dependencies):] #pylint: disable=C0103", "other, self, graph=self.graph) def __floordiv__(self, other): return slice_op(operator.floordiv, self, other,", "delattr_ = nodeop(builtins.delattr) hash_ = nodeop(builtins.hash) memoryview_ = nodeop(builtins.memoryview) set_", "_noop_callback with callback(self, context): if self.__class__.__name__ == \"Node\": context[self] =", "Get the stack context so we can report where the", "self.is_shape_finalized() and len(self.nodes) > 0: if isinstance(key, (int, Node)): key", "*args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs): if len(args)", "elif isinstance(key, tuple): return var_index(self, list(key), graph=self) else: return var_index(self,", "**kwargs) @property def domain(self): return self.kwargs[\"domain\"] @property def var(self): var,", "nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list) range_ = nodeop(builtins.range)", "UNSET_SHAPE, DEFAULT_SHAPES import builtins import operator from collections import OrderedDict,", "and_ = nodeop(operator.and_) attrgetter = nodeop(operator.attrgetter) concat = nodeop(operator.concat) contains", "abs_ = nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ = nodeop(builtins.help) min_", "if the existing node is not equal to the current", "name, graph=self.graph) def __getitem__(self, key): if self.__class__.__name__ != \"Node\": if", "in [\"placeholder\", \"state\", \"input\", \"output\", \"temp\"] and not node.is_shape_finalized(): context[node]", "positional arguments passed to the target kwargs : dict keywoard", "Stop tracing at the last interactive cell if interactive and", "self, other, graph=self.graph) def __rxor__(self, other): return slice_op(operator.xor, other, self,", "def __repr__(self): return \"<func_op '%s' target=%s args=<%d items>>\" % \\", "else other.__rmod__(self) def __rmod__(self, other): return mod(other, self, graph=self.graph) if", "False if isinstance(var, (Integral, Real, str)): var = np.asarray([var]) elif", "Node wrapper for stateless functions. 
    @property
    def name(self):
        """
        str : Unique name of the node within its graph.
        """
        return self._name

    @name.setter
    def name(self, name):
        self.set_name(name)

    @property
    def graph(self):
        """
        Node or None : Parent graph of this node. If graph is `None`, this is
        the top-level graph.
        """
        return self._graph

    @graph.setter
    def graph(self, graph):
        self._graph = Node.get_active_graph(graph)

    @property
    def gname(self):
        """
        str : Scoped name of the node, joining the names of all enclosing
        graphs.
        """
        scope_names = [self.name]
        cgraph = self.graph
        while cgraph is not None:
            scope_names.append(cgraph.name)
            cgraph = cgraph.graph
        return "/".join(list(reversed(scope_names)))

    @property
    def op_name(self):
        """
        str : Operation name which describes the node functionality.
        """
        return self._op_name

    @op_name.setter
    def op_name(self, op_name):
        if op_name:
            self._op_name = op_name
        elif self.__class__.__name__ == "Node":
            self._op_name = self.name
        else:
            self._op_name = self.__class__.__name__

    def preds(self):
        return self._predecessors

    def succs(self):
        return self._succesors

    def add_predecessor(self, pred):
        if isinstance(pred, Node):
            self._predecessors.append(pred.gname)
        else:
            self._predecessors.append(pred)

    def add_successor(self, succ):
        if isinstance(succ, Node):
            self._succesors.append(succ.gname)
        else:
            self._succesors.append(succ)

    def set_edges(self):
        for e in self.args:
            self.add_predecessor(e)
            if isinstance(e, Node):
                e.add_successor(self)
    @property
    def args(self):
        """
        tuple : Positional arguments which are passed to the `_evaluate`
        method when the node is evaluated.
        """
        return tuple(self._args)

    @args.setter
    def args(self, args):
        new_args = []
        for arg in args:
            if isinstance(arg, Node):
                if self.__class__.__name__ == "Node":
                    self.nodes[arg.name] = self.graph[arg.name]
            new_args.append(arg)
        self._args = tuple(new_args)

    @property
    def argnames(self):
        return [a.name if isinstance(a, Node) else a for a in self.args]

    @property
    def shape(self):
        """
        tuple : Shape of the output for a node. This can be a tuple of
        integers or parameter node names.
        """
        return self._shape

    @shape.setter
    def shape(self, shape):
        self.set_shape(shape, init=True)

    def add_attribute(self, key, value):
        self.added_attrs.append(key)
        self.kwargs[key] = value

    def is_shape_finalized(self):
        if self.shape == UNSET_SHAPE:
            return False
        for s in self.shape:
            if not isinstance(s, Integral):
                return False
        return True

    def set_shape(self, shape=None, init=False):
        if isinstance(shape, float):
            self._shape = tuple([int(shape)])
        elif isinstance(shape, Integral):
            self._shape = tuple([shape])
        elif isinstance(shape, Node):
            self._shape = tuple([shape])
        elif not shape or len(shape) == 0:
            # TODO: Change in order to enable "is shape finalized" to work
            self._shape = UNSET_SHAPE
        else:
            shapes = []
            for dim in shape:
                if isinstance(dim, (Node, Integral)):
                    shapes.append(dim)
                elif isinstance(dim, float):
                    shapes.append(int(dim))
                else:
                    raise TypeError(f"Shape value must be placeholder or "
                                    f"integer value for {self.name}\n"
                                    f"\tDim: {dim}")
            self._shape = tuple(shapes)
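    # Shape handling sketch (hypothetical values): `set_shape(3.0)` stores
    # `(3,)`, an empty shape stores the `UNSET_SHAPE` sentinel, and a tuple
    # such as `(4, n)` may keep a parameter node `n` as a symbolic dimension,
    # so `is_shape_finalized()` only returns True once every entry is an
    # integer.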
\"\"\" return node_hash(self)", "\"index\")) else other.__radd__(self) def __radd__(self, other): return add(other, self, graph=self.graph)", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__lt__(self) def __ge__(self, other):", "pow_ = nodeop(builtins.pow) super_ = nodeop(builtins.super) bytes_ = nodeop(builtins.bytes) float_", "variable used for indexing into. idx : tuple Tuple of", "self.kwargs[\"domain\"] @property def var(self): var, index_list = self.args return var", "of an node. Returns ------- instantiated_node : Node Node instance.", "f\"{self.name}{key}\" if name not in self.nodes.keys(): raise KeyError(f\"{name} not in", "shape:\\n\" f\"\\tArgs: {self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\" f\"\\tIndex", "if isinstance(node, str): return self.nodes[node] if isinstance(node, Node): if node.name", "Rational, Real import contextlib import traceback import uuid import numpy", "list Sequence of nodes to be evaluted before evaluating any", "in the associated graph. \"\"\" name = name or uuid.uuid4().hex", "(n, stack) messages.append(message) raise ex from EvaluationError(\"\".join(messages)) @classmethod def init_from_args(cls,", "slice_op(operator.and_, self, other, graph=self.graph) def __rand__(self, other): return slice_op(operator.and_, other,", "scope_names.append(cgraph.name) cgraph = cgraph.graph return \"/\".join(list(reversed(scope_names))) def __enter__(self): Node._graph_stack.append(self) return", "slice1_idx = slice1_var.domain else: slice1_idx = Domain(tuple([])) if isinstance(slice2_var, (slice_op,", "isinstance(key, (list)): return var_index(self, key, graph=self) elif isinstance(key, tuple): return", "return matmul(other, self, graph=self.graph) def __mul__(self, other): return mul(self, other,", "self, graph=self.graph) def __floordiv__(self, other): return slice_op(operator.floordiv, self, other, graph=self.graph)", "return call(self, *args, **kwargs) def __repr__(self): return \"<func_op '%s' target=%s", "**kwargs): return self.run(*args, **kwargs) class EvaluationError(RuntimeError): \"\"\" Failed to evaluate", ": Shape of the output for a node. This can", "a mapping.\") nodes = list(context) # Add the keyword arguments", "so we can report where the node was defined self._stack", "else other.__rsub__(self) def __rsub__(self, other): return sub(other, self, graph=self.graph) if", "return ret else: if isinstance(key, (list)): ret = var_index(self.var, tuple(key),", "+ \"]\" if name in self.graph.nodes: return self.graph.nodes[name] elif isinstance(key,", "self._shape = shape if isinstance(shape, tuple) else tuple(shape) else: for", "instances. 
Raises ------ ValueError If the context specifies more than", "= var_index(self.var, key, graph=self) else: ret = var_index(self.var, tuple([key]), graph=self)", "len(args) == 0: n = cls(name=name, shape=shape, graph=graph, op_name=op_name, dependencies=dependencies,", "c.write_count > 0 else c.name fetches[fetches.index(c)] = c.graph.nodes[write_name] values =", "and not fname.startswith('<'): break # pragma: no cover interactive =", "aa in list(kwargs.keys()): if aa in self.added_attrs: kwargs.pop(aa) return self.target(*args,", "self.kwargs[\"target\"], len(self.args)) def nodeop(target=None, **kwargs): \"\"\" Decorator for creating nodes", "of nodes to be evaluted before evaluating any nodes defined", "nodeop(builtins.round) delattr_ = nodeop(builtins.delattr) hash_ = nodeop(builtins.hash) memoryview_ = nodeop(builtins.memoryview)", "tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret = self.nodes.item_by_index(idx) return", "other): return matmul(self, other, graph=self.graph) def __rmatmul__(self, other): return matmul(other,", "[partial(arg) for arg in self.args] kwargs = {key: partial(value) for", "if name in self.graph.nodes: return self.graph.nodes[name] elif isinstance(key, (list)): return", "@graph.setter def graph(self, graph): self._graph = Node.get_active_graph(graph) @property def gname(self):", "of the output for a node. This can be a", "return \"<slice_%s '%s'>\" % (self.target.__name__, self.name) class func_op(Node): # pylint:", "if isinstance(a, Node) else a for a in self.args] @property", "shape=shape, graph=graph, op_name=op_name, dependencies=dependencies, value=value, **kwargs) else: n = cls(*args,", "f\"\\tArgs: {self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\" f\"\\tIndex Upper", "return self @property def domain(self): return self.kwargs[\"domain\"] def get_index_nodes(self, slice1_var=None,", "self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\\n\\t\" f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New:", "other): return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "Node): if node.name not in self.nodes and (node.graph != self):", "= nodeop(builtins.ord) sum_ = nodeop(builtins.sum) bytearray_ = nodeop(builtins.bytearray) filter_ =", "= False else: raise ValueError(\"`fetches` must be an `Node` instance,", "Use :code:`context=context.copy()` to avoid the context being modified. 
    def run(self, fetches, context=None, *, callback=None, **kwargs):
        """
        Evaluate one or more nodes given a context.

        Parameters
        ----------
        fetches : list[str or Node] or str or Node
            Node instances or names to evaluate.
        context : dict or None
            Context in which to evaluate the nodes.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        kwargs : dict
            Additional context information keyed by variable name.

        Returns
        -------
        values : object or tuple[object]
            Output of the nodes given the context.

        Raises
        ------
        ValueError
            If `fetches` is not a `Node` instance, a node name, or a sequence
            thereof.
        """
        if isinstance(fetches, (str, Node)):
            fetches = [fetches]
            single = True
        elif isinstance(fetches, Sequence):
            single = False
        else:
            raise ValueError("`fetches` must be a `Node` instance, node name, "
                             "or a sequence thereof.")
        fetches = [self.instantiate_node(node) for node in fetches]
        context = self.instantiate_graph(context, **kwargs)
        for c in fetches:
            if c in fetches and c.op_name in ["output", "state", "temp"]:
                # Fetch the most recent write of a stateful node (the
                # versioned-name scheme used here is assumed)
                write_name = f"{c.name}{c.write_count - 1}" if c.write_count > 0 else c.name
                fetches[fetches.index(c)] = c.graph.nodes[write_name]
        values = [fetch.evaluate_node(fetch, context, callback=callback)
                  for fetch in fetches]
        return values[0] if single else tuple(values)
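    # Usage sketch (hypothetical names): `graph.run("y", {"x": 1})` returns a
    # single value, `graph.run(["y", "z"], {"x": 1})` returns a tuple, and
    # keyword arguments such as `graph.run("y", x=1)` are looked up by node
    # name in `self.nodes`.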
    def evaluate_dependencies(self, context, callback=None):
        """
        Evaluate the dependencies of this node and discard the values.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        """
        for node in self.dependencies:
            node.evaluate(context, callback)

    def evaluate(self, context, callback=None):
        """
        Evaluate the node given a context.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.

        Returns
        -------
        value : object
            Output of the node given the context.
        """
        # Evaluate all explicit dependencies first
        self.evaluate_dependencies(context, callback)
        if self in context:
            return context[self]
        # Evaluate the parents
        partial = functools.partial(self.evaluate_node, context=context,
                                    callback=callback)
        args = [partial(arg) for arg in self.args]
        kwargs = {key: partial(value) for key, value in self.kwargs.items()
                  if key not in self.added_attrs}
        # Evaluate the node
        callback = callback or _noop_callback
        with callback(self, context):
            if self.__class__.__name__ == "Node":
                # Plain graph nodes carry their (possibly preset) value
                context[self] = self.value
            else:
                context[self] = self.value = self._evaluate(*args, **kwargs)
        return self.value
    def _evaluate(self, *args, context=None, **kwargs):
        """
        Inheriting nodes should implement this function to evaluate the node.
        """
        return self(*args, context, **kwargs)

    @classmethod
    def evaluate_node(cls, node, context, **kwargs):
        """
        Evaluate a node or constant given a context.
        """
        Node.evaluated_nodes += 1
        try:
            if isinstance(node, Node):
                Node._eval_stack.append(node.name)
                return node.evaluate(context, **kwargs)
            partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
            if isinstance(node, tuple):
                return tuple(partial(element) for element in node)
            if isinstance(node, list):
                return [partial(element) for element in node]
            if isinstance(node, dict):
                return {partial(key): partial(value) for key, value in node.items()}
            if isinstance(node, slice):
                return slice(*[partial(getattr(node, attr))
                               for attr in ['start', 'stop', 'step']])
            return node
        except Exception as ex:  # pylint: disable=broad-except
            messages = []
            interactive = False
            if isinstance(node, Node) or not is_iterable(node):
                node = [node]
            for n in node:
                if isinstance(n, Node):
                    frames = []
                    for frame in reversed(n._stack):  # pylint: disable=protected-access
                        # Do not capture any internal stack traces
                        fname = frame.filename
                        # Stop tracing at the last interactive cell
                        if interactive and not fname.startswith('<'):
                            break  # pragma: no cover
                        interactive = fname.startswith('<')
                        frames.append(frame)
                    stack = "".join(traceback.format_list(list(reversed(frames))))
                    message = "Failed to evaluate node `%s` defined at:\n\n%s" % (n, stack)
                    messages.append(message)
            raise ex from EvaluationError("".join(messages))

    @classmethod
    def init_from_args(cls, *args, name=None, shape=None, graph=None,
                       dependencies=None, op_name=None, value=None, **kwargs):
        if len(args) == 0:
            n = cls(name=name, shape=shape, graph=graph, op_name=op_name,
                    dependencies=dependencies, value=value, **kwargs)
        else:
            n = cls(*args, name=name, shape=shape, graph=graph, op_name=op_name,
                    dependencies=dependencies, value=value, **kwargs)
        return n
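    # `evaluate_node` recurses into containers: a sketch such as
    # `Node.evaluate_node((a, [b, 2]), context)` with hypothetical nodes `a`
    # and `b` returns `(value_a, [value_b, 2])`. Tuples, lists, dicts and
    # slices are rebuilt with each element evaluated; constants pass through
    # unchanged.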
    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, data):
        self.__dict__.update(data)

    def set_name(self, name):
        """
        Set the name for a node and update the graph.

        Parameters
        ----------
        value : str
            Unique name of the node.

        Returns
        -------
        self : Node
            This node.

        Raises
        ------
        ValueError
            If a node with `value` already exists in the associated graph.
        """
        name = name or uuid.uuid4().hex
        # TODO: Need a way to check if the existing node is not equal to the
        # current node as well
        if self.graph and name in self.graph.nodes:
            raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t"
                             f"Existing: {self.graph.nodes[name].args}\n\t"
                             f"New: {self.args}")
        if self.graph:
            graph = self.graph
            if self._name is not None and self._name in graph.nodes:
                graph.update_graph_key(self._name, name)
            else:
                graph.nodes[name] = self
        self._name = name
        return self

    def __enter__(self):
        Node._graph_stack.append(self)
        return self

    def __exit__(self, *args):
        assert self == Node._graph_stack.pop()

    def __repr__(self):
        return "<node '%s'>" % self.name

    def __getitem__(self, key):
        if self.__class__.__name__ != "Node":
            if isinstance(key, (slice, Integral)):
                return getitem(self, key, graph=self.graph)
            else:
                if isinstance(key, (list,)):
                    return var_index(self, key, graph=self)
                elif isinstance(key, tuple):
                    return var_index(self, list(key), graph=self)
                else:
                    return var_index(self, [key], graph=self)
        else:
            return self.nodes[key]
    def __bool__(self):
        return True

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        return hash(self) == hash(other)

    def func_hash(self):
        """
        This returns the functional hash of a particular node. The default
        hash returns an object id, whereas this function returns a hash of
        all attributes and subgraphs of a node.
        """
        return node_hash(self)

    def find_node(self, name):
        # Walk the enclosing graphs to resolve a node by name (the helper
        # name used here is assumed)
        g = self.graph
        while g is not None and name not in g.nodes:
            g = g.graph
        if g is not None and name in g.nodes:
            return g.nodes[name]
        raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}")

    def __len__(self):
        # TODO: Update this to check for finalized shape
        if self.shape == UNSET_SHAPE:
            raise TypeError(f'`shape` must be specified explicitly for nodes {self}')
        return self.shape[0]

    def __iter__(self):
        num = len(self)
        for i in range(num):
            yield self[i]

    def __add__(self, other):
        return add(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__radd__(self)

    def __radd__(self, other):
        return add(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__add__(self)

    def __sub__(self, other):
        return sub(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rsub__(self)

    def __rsub__(self, other):
        return sub(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__sub__(self)

    def __pow__(self, other):
        return pow_(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rpow__(self)

    def __rpow__(self, other):
        return pow_(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__pow__(self)

    def __matmul__(self, other):
        return matmul(self, other, graph=self.graph)

    def __rmatmul__(self, other):
        return matmul(other, self, graph=self.graph)

    def __mul__(self, other):
        return mul(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rmul__(self)

    def __rmul__(self, other):
        return mul(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__mul__(self)

    def __truediv__(self, other):
        return truediv(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rtruediv__(self)

    def __rtruediv__(self, other):
        return truediv(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__truediv__(self)

    def __floordiv__(self, other):
        return floordiv(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rfloordiv__(self)

    def __rfloordiv__(self, other):
        return floordiv(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__floordiv__(self)

    def __mod__(self, other):
        return mod(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rmod__(self)

    def __rmod__(self, other):
        return mod(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__mod__(self)
    def __lshift__(self, other):
        return lshift(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rlshift__(self)

    def __rlshift__(self, other):
        return lshift(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__lshift__(self)

    def __rshift__(self, other):
        return rshift(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rrshift__(self)

    def __rrshift__(self, other):
        return rshift(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rshift__(self)

    def __and__(self, other):
        return and_(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rand__(self)

    def __rand__(self, other):
        return and_(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__and__(self)

    def __or__(self, other):
        return or_(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__ror__(self)

    def __ror__(self, other):
        return or_(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__or__(self)

    def __xor__(self, other):
        return xor(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rxor__(self)

    def __rxor__(self, other):
        return xor(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__xor__(self)

    def __lt__(self, other):
        return lt(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__gt__(self)

    def __le__(self, other):
        return le(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__ge__(self)

    def __ne__(self, other):
        return ne(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__ne__(self)

    def __gt__(self, other):
        return gt(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__lt__(self)

    def __ge__(self, other):
        return ge(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__le__(self)

    def __invert__(self):
        return inv(self, graph=self.graph)

    def __neg__(self):
        return neg(self, graph=self.graph)

    def __pos__(self):
        return pos(self, graph=self.graph)

    def __reversed__(self):
        return reversed_(self, graph=self.graph)

    def update_graph_key(self, old_key, new_key):
        n = list(map(lambda k: (new_key, self.nodes[k]) if k == old_key
                     else (k, self.nodes[k]), self.nodes.keys()))
        self.nodes = Graph(n)

    def insert_node(self, node, idx):
        node_list = list(self.nodes.items())
        node_list.insert(idx, (node.name, node))
        self.nodes = Graph(node_list)

    def __call__(self, *args, **kwargs):
        return self.run(*args, **kwargs)
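# A minimal end-to-end sketch of the API above. All names and values are
# hypothetical, and the function only exercises plain `Node` and `func_op`
# behaviour as reconstructed here; it is wrapped in a function so importing
# this module stays side-effect free.
def _example_arithmetic_graph():
    with Node(name="main") as graph:
        a = Node(name="a")
        b = Node(name="b")
        # Operator overloads build func_op nodes registered in `graph`
        y = (a + b) * 2
    # Expected to return 14 under these assumptions
    return graph.run([y], {a: 3, b: 4})[0]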
class EvaluationError(RuntimeError):
    """
    Failed to evaluate a node.
    """


class var_index(Node):  # pylint: disable=C0103,W0223
    """
    Node representing values of a variable corresponding to input index
    values.

    Parameters
    ----------
    var : Node
        The multi-dimensional variable used for indexing into.
    idx : tuple
        Tuple of either integer values or index/index_op nodes.
    """
    def __init__(self, var, idx, name=None, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        else:
            domain = Domain(idx)
        super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs)

    @property
    def domain(self):
        return self.kwargs["domain"]

    @property
    def var(self):
        var, index_list = self.args
        return var

    def set_name(self, name):
        """
        Set the name for a variable index, making sure that the name is a
        unique string which corresponds to the variable, index combination.
        """
        # Delegates to the base implementation; the uniqueness of the
        # variable/index naming is assumed to be handled by the caller
        return super(var_index, self).set_name(name)

    def __getitem__(self, key):
        if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
            return self
        elif self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape):
            if isinstance(key, Integral):
                key = tuple([key])
            idx = np.ravel_multi_index(key, dims=self.shape, order='C')
            ret = self.nodes.item_by_index(idx)
            return ret
        else:
            if isinstance(key, (list,)):
                ret = var_index(self.var, tuple(key), graph=self)
            elif isinstance(key, tuple):
                ret = var_index(self.var, key, graph=self)
            else:
                ret = var_index(self.var, tuple([key]), graph=self)
            return ret

    def is_scalar(self, val=None):
        if val is not None and (not isinstance(val, np.ndarray) or
                                (len(val.shape) == 1 and val.shape[0] == 1)):
            if self.var.shape != DEFAULT_SHAPES[0] and \
                    (len(self.var.shape) == 1 and not isinstance(self.var.shape[0], Node)):
                raise ValueError(f"Invalid shape var for var index {self} with "
                                 f"variable shape {self.var.shape}")
            return True
        else:
            return self.var.shape == DEFAULT_SHAPES[0]
    def _evaluate(self, var, indices, **kwargs):
        if self.is_scalar(var):
            out_shape = (1,)
            indices = (0,)
            single = True
        else:
            out_shape = self.domain.shape_from_indices(indices)
            indices = self.domain.compute_pairs()
            single = False
        if isinstance(var, (Integral, Real, str)):
            var = np.asarray([var])
        elif not isinstance(var, (np.ndarray, list)):
            raise ValueError(f"Variable {var} with type {type(var)} is not a "
                             f"list or numpy array, and cannot be sliced for "
                             f"{self.name}")
        elif isinstance(var, list):
            var = np.asarray(var)
        if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
            if len(out_shape) > len(var.shape):
                for i in range(len(out_shape)):
                    if out_shape[i] == 1:
                        var = np.expand_dims(var, axis=i)
            else:
                var = np.squeeze(var)
        if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
            raise ValueError(f"Index list does not match {var.shape} in "
                             f"{self.var.name} - {self.var.op_name} dimensions "
                             f"for slice {self.args[0].name} with {out_shape}.\n"
                             f"Domain: {self.domain}\n"
                             f"Eval Stack: {Node._eval_stack}")
        if not single and not all([(idx_val - 1) >= indices[-1][idx]
                                   for idx, idx_val in enumerate(var.shape)]):
            raise ValueError(f"var_index {self.name} has indices which are "
                             f"greater than the variable shape:\n"
                             f"\tArgs: {self.args}\n"
                             f"\tVar shape: {var.shape}\n"
                             f"\tNode shape: {self.var.shape}\n"
                             f"\tIndex Upper bounds: {indices[-1]}")
        indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x,
                           indices))
        res = var[indices] if single else \
            np.asarray([var[idx] for idx in indices]).reshape(out_shape)
        if out_shape == (1,) and len(indices) == 1:
            res = res[0]
        self.domain.set_computed(out_shape, indices)
        return res
\"\"\"", "other.__rtruediv__(self) def __floordiv__(self, other): return floordiv(self, other, graph=self.graph) if not", "return ge(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rshift__(self) def __and__(self, other): return", "if \"op_name\" in kwargs \\ else f\"{target.__name__}\" if \"domain\" in", "(self.name, self.kwargs[\"target\"], len(self.args)) def nodeop(target=None, **kwargs): \"\"\" Decorator for creating", "= nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list) range_ =", "ret = self.nodes[name] return ret else: name = [] if", ": dict Keyword arguments passed to the `_evaluate` method. \"\"\"", "context. Raises ------ ValueError If `fetches` is not an `Node`", "to work self._shape = UNSET_SHAPE else: shapes = [] for", "at the last interactive cell if interactive and not fname.startswith('<'):", "node_list.insert(idx, (node.name, node)) self.nodes = Graph(node_list) def __call__(self, *args, **kwargs):", "self.value def _evaluate(self, *args, context=None, **kwargs): \"\"\" Inheriting nodes should", "the target \"\"\" def __init__(self, target, *args, **kwargs): kwargs[\"op_name\"] =", "self._succesors.append(succ.gname) else: self._succesors.append(succ) def set_edges(self): for e in self.args: self.add_predecessor(e)", "with arguments if target is None: return functools.partial(nodeop, **kwargs) #", "idx, name=name, domain=domain, **kwargs) @property def domain(self): return self.kwargs[\"domain\"] @property", "(\"slice_op\", \"var_index\", \"index\")) else other.__rxor__(self) def __rxor__(self, other): return xor(other,", "nodeop(builtins.hash) memoryview_ = nodeop(builtins.memoryview) set_ = nodeop(builtins.set) add = nodeop(operator.add)", "ValueError(f\"duplicate name '{name}' in {self.graph.name}:\" f\"Existing: {self.graph.nodes[name].args}\\n\" f\"New: {self.args}\") if", "arg0_dom = self.args[0].domain arg1_dom = self.args[1].domain op1_idx = self.domain.map_sub_domain(arg0_dom) if", "\"index\")) else other.__or__(self) def __xor__(self, other): return xor(self, other, graph=self.graph)", "[] # TODO: CHange this to underscore private variable self.kwargs", "truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "disable=C0103,W0223 \"\"\" Node representing values of a variable corresponding to", "Returns ------- value : object Output of the node given", "raise TypeError(f'`shape` must be specified explicitly for nodes {self}') return", "other): return slice_op(operator.truediv, other, self, graph=self.graph) def __floordiv__(self, other): return", "nodeop(operator.truediv) truth = nodeop(operator.truth) xor = nodeop(operator.xor) import_ = nodeop(importlib.import_module)", "# pylint: disable=C0103,R0903 \"\"\" Node wrapper for stateless functions. Parameters", "in nodes: # pylint:disable=W0621 value = context.pop(node) node = self.instantiate_node(node)", "not single and not all([(idx_val - 1) >= indices[-1][idx] for", "= nodeop(operator.invert) ior = nodeop(operator.ior) ipow = nodeop(operator.ipow) irshift =", "node. 
class slice_op(Node):
    """
    Node representing multi-dimensional operations performed on a node.

    Parameters
    ----------
    target : callable
        The operation applied over the combined operand domains.
    args : tuple
        The operand nodes.
    kwargs : dict
        Keyword arguments passed to the `_evaluate` method.
    """
    def __init__(self, target, *args, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        elif len(args) == 2:
            all_args = _flatten_iterable(args)
            slice1_var, slice1_idx, slice2_var, slice2_idx = \
                self.get_index_nodes(all_args[0], all_args[1])
            domain = slice1_idx.combine_set_domains(slice2_idx)
        else:
            domain = Domain(tuple([]))
        if "op_name" in kwargs:
            kwargs.pop("op_name")
        target_name = f"{target.__module__}.{target.__name__}"
        super(slice_op, self).__init__(*args, target=target_name, domain=domain,
                                       op_name=f"slice_{target.__name__}", **kwargs)
        self.target = target

    @property
    def domain(self):
        return self.kwargs["domain"]

    def get_index_nodes(self, slice1_var=None, slice2_var=None):
        if slice1_var is None and slice2_var is None:
            slice1_var, slice2_var = self.args
        if isinstance(slice1_var, (slice_op, var_index)) or \
                _is_node_type_instance(slice1_var, "GroupNode"):
            slice1_idx = slice1_var.domain
        elif _is_node_type_instance(slice1_var, "index"):
            slice1_idx = slice1_var.domain
        else:
            slice1_idx = Domain(tuple([]))
        if isinstance(slice2_var, (slice_op, var_index)) or \
                _is_node_type_instance(slice2_var, "GroupNode"):
            slice2_idx = slice2_var.domain
        elif _is_node_type_instance(slice2_var, "index"):
            slice2_idx = slice2_var.domain
        else:
            slice2_idx = Domain(tuple([]))
        return slice1_var, slice1_idx, slice2_var, slice2_idx

    def set_shape(self, shape=None, init=False):
        s = []
        assert isinstance(shape, (tuple, list))
        if all([isinstance(sv, Integral) for sv in shape]) and \
                len(self.domain) == np.prod(shape) and len(shape) > 0:
            self._shape = shape if isinstance(shape, tuple) else tuple(shape)
        else:
            for idx, d in enumerate(self.domain.dom_set):
                if shape and isinstance(shape[idx], (func_op, Integral)):
                    s.append(shape[idx])
                elif shape and isinstance(shape[idx], float):
                    s.append(int(shape[idx]))
                elif isinstance(d, float):
                    s.append(int(d))
                elif isinstance(d, var_index):
                    s.append(d.domain)
                else:
                    s.append(d)
            self._shape = tuple(s)

    def __getitem__(self, key):
        if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
            return self
        elif self.is_shape_finalized() and len(self.nodes) > 0:
            if isinstance(key, (int, Node)):
                key = tuple([key])
            name = f"{self.name}{key}"
            if name not in self.nodes.keys():
                raise KeyError(f"{name} not in {self.name} keys:\n"
                               f"Node keys: {list(self.nodes.keys())}")
            ret = self.nodes[name]
            return ret
        else:
            name = []
            if isinstance(key, Node):
                name.append(key.name)
            elif hasattr(key, "__len__") and not isinstance(key, str):
                for k in key:
                    if isinstance(k, Node):
                        name.append(k.name)
                    else:
                        name.append(str(k))
            else:
                name.append(str(key))
            name = self.name + "[" + "][".join(name) + "]"
            if name in self.graph.nodes:
                return self.graph.nodes[name]
            elif isinstance(key, (list,)):
                return var_index(self, key, name=name, graph=self.graph)
            elif isinstance(key, tuple):
                return var_index(self, list(key), name=name, graph=self.graph)
            else:
                return var_index(self, [key], name=name, graph=self.graph)

    def is_scalar(self, val):
        return not isinstance(val, np.ndarray) or \
            (len(val.shape) == 1 and val.shape[0] == 1)
\"\"\" # Add dependencies to the", "float): shapes.append(int(dim)) else: raise TypeError(f\"Shape value must be placeholder or", "evaluate(self, context, callback=None): \"\"\" Evaluate the node given a context.", "= nodeop(operator.index) indexOf = nodeop(operator.indexOf) inv = nodeop(operator.inv) invert =", "to enable \"is shape finalized\" to work self._shape = UNSET_SHAPE", "else: s.append(d) self._shape = tuple(s) def is_scalar(self, val): return not", "\"\"\" str : Operation name which describes the node functionality.", "callback = callback or _noop_callback with callback(self, context): if self.__class__.__name__", "slice_op(operator.sub, other, self, graph=self.graph) def __pow__(self, other): return slice_op(builtins.pow, self,", "context is None: context = {} elif not isinstance(context, Mapping):", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__lt__(self) def __ge__(self, other): return", "1) >= indices[-1][idx] for idx, idx_val in enumerate(var.shape)]): raise ValueError(f\"var_index", "context.pop(node) node = self.instantiate_node(node) if node in context: raise ValueError(f\"duplicate", "itemgetter = nodeop(operator.itemgetter) le = nodeop(operator.le) length_hint = nodeop(operator.length_hint) lshift", "def add_predecessor(self, pred): if isinstance(pred, Node): self._predecessors.append(pred.gname) else: self._predecessors.append(pred) def", "def __le__(self, other): return slice_op(operator.lt, other, self, graph=self.graph) def __ne__(self,", "self.value = self._evaluate(*args, **kwargs) return self.value def _evaluate(self, *args, context=None,", "the context being modified. Parameters ---------- fetches : list[str or", "other, graph=self.graph) def __rmul__(self, other): return slice_op(operator.mul, other, self, graph=self.graph)", "nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ = nodeop(builtins.help) min_ = nodeop(builtins.min)", "nodeop(operator.lt) matmul = nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod)", "{var.shape} in {self.var.name} - {self.var.op_name}\" f\"dimensions for slice {self.args[0].name} with", "node.evaluate(context, callback) def evaluate(self, context, callback=None): \"\"\" Evaluate the node", "else: name.append(key) name = self.var.name + \"[\" + \"][\".join(name) +", "**kwargs) return n def __bool__(self): return True def __hash__(self): return", "context def run(self, fetches, context=None, *, callback=None, **kwargs): \"\"\" Evaluate", "self.name def add_attribute(self, key, value): self.added_attrs.append(key) self.kwargs[key] = value def", "dict or None Context in which to evaluate the nodes.", "{self.graph}\") def __len__(self): #TODO: Update this to check for finalzied", "Node)): fetches = [fetches] single = True elif isinstance(fetches, Sequence):", "\"index\")) else other.__ror__(self) def __ror__(self, other): return or_(other, self, graph=self.graph)", "fnc self.op_name = f\"{fnc.__name__}\" self.kwargs[\"target\"] = f\"{fnc.__module__}.{fnc.__name__}\" def __getitem__(self, key):", "None and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] =", "items>>\" % \\ (self.name, self.kwargs[\"target\"], len(self.args)) def nodeop(target=None, **kwargs): \"\"\"", "else dependencies if self.graph: self.dependencies.extend(self.graph.dependencies) # Choose a name for", "add the node to the graph self._name = None self.name", "= tuple(shapes) @staticmethod def get_active_graph(graph=None): \"\"\" 
class func_op(Node):  # pylint: disable=C0103,R0903
    """
    Node wrapper for stateless functions.

    Parameters
    ----------
    target : callable
        function to evaluate the node
    args : tuple
        positional arguments passed to the target
    kwargs : dict
        keyword arguments passed to the target
    """
    def __init__(self, target, *args, **kwargs):
        kwargs["op_name"] = kwargs["op_name"] if "op_name" in kwargs \
            else f"{target.__name__}"
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        else:
            domain = Domain(tuple([]))
        self._target = None
        super(func_op, self).__init__(*args,
                                      target=f"{target.__module__}.{target.__name__}",
                                      domain=domain, **kwargs)
        self.target = target
        # Keep the bookkeeping kwargs out of the arguments forwarded to
        # `target` at evaluation time (assumed here, matching how
        # `added_attrs` is filtered in `evaluate` and `_evaluate`)
        self.added_attrs += ["target", "domain"]

    @property
    def target(self):
        return self._target

    @target.setter
    def target(self, fnc):
        self._target = fnc
        self.op_name = f"{fnc.__name__}"
        self.kwargs["target"] = f"{fnc.__module__}.{fnc.__name__}"

    def __getitem__(self, key):
        return self

    @property
    def domain(self):
        return self.kwargs["domain"]

    def _evaluate(self, *args, **kwargs):
        for aa in list(kwargs.keys()):
            if aa in self.added_attrs:
                kwargs.pop(aa)
        return self.target(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        return call(self, *args, **kwargs)

    def __repr__(self):
        return "<func_op '%s' target=%s args=<%d items>>" % \
            (self.name, self.kwargs["target"], len(self.args))


def nodeop(target=None, **kwargs):
    """
    Decorator for creating nodes from functions.
    """
    # This is called when the decorator is used with arguments
    if target is None:
        return functools.partial(nodeop, **kwargs)

    # This is called when the decorator is used without arguments
    @functools.wraps(target)
    def _wrapper(*args, **kwargs_inner):
        return func_op(target, *args, **kwargs_inner, **kwargs)
    return _wrapper


@nodeop
def call(func, *args, **kwargs):
    """
    Call `func` with positional arguments `args` and keyword arguments
    `kwargs`.

    Parameters
    ----------
    func : callable
        Function to call when the node is executed.
    args : tuple
        Positional arguments passed to `func`.
    kwargs : dict
        Mapping of keyword arguments passed to `func`.
    """
    return func(*args, **kwargs)
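# Sketch of `nodeop` used as a decorator. `_example_scaled_sum` is a
# hypothetical function, not part of the library: calling it builds a
# `func_op` node instead of executing the body, and the body only runs when
# the node is evaluated with a context.
@nodeop
def _example_scaled_sum(x, y, scale=1.0):
    # Runs at evaluation time with the already-evaluated inputs
    return (x + y) * scale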
\"\"\" _graph_stack = deque([None]) _eval_stack = [] stack_size", "all_ = nodeop(builtins.all) dir_ = nodeop(builtins.dir) hex_ = nodeop(builtins.hex) next_", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__gt__(self)", "= nodeop(builtins.callable) format_ = nodeop(builtins.format) len_ = nodeop(builtins.len) property_ =", "return rshift(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "or tuple[object] Output of the nodes given the context. Raises", "memoryview_ = nodeop(builtins.memoryview) set_ = nodeop(builtins.set) add = nodeop(operator.add) and_", "return tuple(self._args) @property def argnames(self): return [a.name if isinstance(a, Node)", "if self.is_scalar(op1) or self.is_scalar(op2): value = self.target(op1, op2) else: arg0_dom", "other): return slice_op(builtins.pow, other, self, graph=self.graph) def __mul__(self, other): return", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__floordiv__(self) def", "{partial(key): partial(value) for key, value in node.items()} if isinstance(node, slice):", "shape if isinstance(shape, tuple) else tuple(shape) else: for idx, d", "or `None` to use a random, unique identifier. shape :", "other): return rshift(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "else: slice1_idx = Domain(tuple([])) if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var,", "\"\"\" Call `func` with positional arguments `args` and keyword arguments", "Callback to be evaluated when an node is evaluated. Returns", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__mul__(self) def __truediv__(self, other): return", "return slice_op(operator.sub, self, other, graph=self.graph) def __rsub__(self, other): return slice_op(operator.sub,", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rxor__(self) def __rxor__(self,", "random, unique identifier. shape : tuple or None Shape of", "node given a context. Parameters ---------- context : dict Normalised", "{self.args[0].name} with {out_shape}.\\n\" f\"Domain: {self.domain}\\n\" f\"Eval Stack: {Node._eval_stack}\") if not", "name. Returns ------- normalized_context : dict[Node, object] Normalized context whose", "evaluate an node. \"\"\" class var_index(Node): # pylint: disable=C0103,W0223 \"\"\"", "1 and val.shape[0] == 1) def _evaluate(self, op1, op2, context=None,", "= nodeop(builtins.object) sorted_ = nodeop(builtins.sorted) ascii_ = nodeop(builtins.ascii) enumerate_ =", "self.added_attrs = [] # TODO: CHange this to underscore private", "other): return add(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "= nodeop(builtins.sum) bytearray_ = nodeop(builtins.bytearray) filter_ = nodeop(builtins.filter) issubclass_ =", "return slice_op(operator.mul, self, other, graph=self.graph) def __rmul__(self, other): return slice_op(operator.mul,", "= nodeop(builtins.property) type_ = nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ =", "True def set_shape(self, shape=None, init=False): if isinstance(shape, float): self._shape =", "= nodeop(builtins.staticmethod) bin_ = nodeop(builtins.bin) eval_ = nodeop(builtins.eval) int_ =", "a mapping. 
\"\"\" if context is None: context = {}", "Node._graph_stack.pop() def __repr__(self): return \"<node '%s'>\" % self.name def add_attribute(self,", "[] self._succesors = [] self.args = args if \"name\" in", "arguments passed to the target kwargs : dict keywoard arguments", "@property def args(self): \"\"\" tuple : Positional arguments which are", "if key not in self.added_attrs} # Evaluate the node callback", "Graph: {self.graph}\") def __len__(self): #TODO: Update this to check for", "(\"slice_op\", \"var_index\", \"index\")) else other.__ne__(self) def __gt__(self, other): return gt(self,", "performed on a node. Parameters ---------- target : cal The", "reversed_ = nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr) max_", "node cannot be found in the associated graph. \"\"\" #", "\"Node\": if isinstance(key, (slice, Integral)): return getitem(self, key, graph=self.graph) else:", "self, graph=self.graph) def __lt__(self, other): return slice_op(operator.lt, self, other, graph=self.graph)", "(\"slice_op\", \"var_index\", \"index\")) else other.__gt__(self) def __le__(self, other): return le(self,", "all explicit dependencies first self.evaluate_dependencies(context, callback) if self in context:", "node functionality. value : Any or None If a node", "nodeop(builtins.list) range_ = nodeop(builtins.range) vars_ = nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod)", "shape=None, init=False): if isinstance(shape, float): self._shape = tuple([np.int(shape)]) elif isinstance(shape,", "indices, **kwargs): if self.is_scalar(var): out_shape = (1,) indices = (0,)", "list, np.ndarray)) and len(key) == 0: return self elif self.is_shape_finalized()", "return _wrapper @nodeop def call(func, *args, **kwargs): \"\"\" Call `func`", "1 and val.shape[0] == 1)): if self.var.shape != DEFAULT_SHAPES[0] and", "methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul = nodeop(operator.mul) ne", "other): return slice_op(operator.mul, self, other, graph=self.graph) def __rmul__(self, other): return", "gt(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list)", "= self.graph[arg.name] new_args.append(arg) self._args = tuple(new_args) @shape.setter def shape(self, shape):", "Node representing values of a variable corresponding to input index", "self, graph=self.graph) def __sub__(self, other): return slice_op(operator.sub, self, other, graph=self.graph)", "shape: {var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\" f\"\\tIndex Upper bounds: {indices[-1]}\") indices", "def run(self, fetches, context=None, *, callback=None, **kwargs): \"\"\" Evaluate one", "\"index\")) else other.__rtruediv__(self) def __floordiv__(self, other): return floordiv(self, other, graph=self.graph)", "other.__ge__(self) def __ne__(self, other): return ne(self, other, graph=self.graph) if not", "self.__class__.__name__ != \"Node\": if isinstance(key, (slice, Integral)): return getitem(self, key,", "list) else kwargs.pop(\"domain\") else: domain = Domain(idx) super(var_index, self).__init__(var, idx,", "% (self.target.__name__, self.name) class func_op(Node): # pylint: disable=C0103,R0903 \"\"\" Node", "self(*args, context, **kwargs) @classmethod def evaluate_node(cls, node, context, **kwargs): \"\"\"", "other, graph=self.graph) def __le__(self, other): return slice_op(operator.lt, 
    @property
    def name(self):
        """str : Unique name of the node"""
        return self._name

    @name.setter
    def name(self, name):
        self.set_name(name)

    @property
    def op_name(self):
        """
        str : Operation name which describes the node functionality.
        """
        return self._op_name

    @op_name.setter
    def op_name(self, op_name):
        if op_name:
            self._op_name = op_name
        elif self.__class__.__name__ == "Node":
            self._op_name = self.name
        else:
            self._op_name = self.__class__.__name__

    @property
    def graph(self):
        """
        polymath.srdfg.graph.Graph : Parent graph of this node. If graph is
        `None`, this is the top-level graph.
        """
        return self._graph

    @graph.setter
    def graph(self, graph):
        self._graph = Node.get_active_graph(graph)

    @property
    def args(self):
        """
        tuple : Positional arguments which are evaluated before this node.
        """
        return tuple(self._args)

    @args.setter
    def args(self, args):
        new_args = []
        for arg in args:
            if isinstance(arg, Node):
                if self.__class__.__name__ == "Node":
                    self.nodes[arg.name] = self.graph[arg.name]
            new_args.append(arg)
        self._args = tuple(new_args)

    @property
    def argnames(self):
        return [a.name if isinstance(a, Node) else a for a in self.args]

    @property
    def shape(self):
        """
        tuple : Shape of the node output.
        """
        return self._shape

    @shape.setter
    def shape(self, shape):
        self.set_shape(shape, init=True)

    @property
    def domain(self):
        return Domain(tuple([]))

    @property
    def var(self):
        return self

    @property
    def gname(self):
        scope_names = [self.name]
        cgraph = self.graph
        while cgraph:
            scope_names.append(cgraph.name)
            cgraph = cgraph.graph
        return "/".join(list(reversed(scope_names)))

    def set_name(self, name):
        """
        Set the name of the node and update the associated graph.

        Parameters
        ----------
        value : str
            Unique name of the node.

        Returns
        -------
        self : Node
            This node.

        Raises
        ------
        ValueError
            If a node with `value` already exists in the associated graph.
        KeyError
            If the current name of the node cannot be found in the associated
            graph.
        """
        name = name or uuid.uuid4().hex
        # TODO: Need a way to check whether the existing node is equal to the
        # current node as well
        if self.graph and name in self.graph.nodes:
            raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t"
                             f"Existing: {self.graph.nodes[name].args}\n\t"
                             f"New: {self.args}")
        if self.graph:
            graph = self.graph
            if self._name and self._name in graph.nodes:
                graph.update_graph_key(self._name, name)
            else:
                graph.nodes[name] = self
        self._name = name
        return self

    def add_attribute(self, key, value):
        self.added_attrs.append(key)
        self.kwargs[key] = value

    def is_shape_finalized(self):
        if self.shape == UNSET_SHAPE:
            return False
        for s in self.shape:
            if not isinstance(s, Integral):
                return False
        return True

    def set_shape(self, shape=None, init=False):
        if isinstance(shape, float):
            self._shape = tuple([np.int(shape)])
        elif isinstance(shape, Integral):
            self._shape = tuple([shape])
        elif isinstance(shape, Node):
            self._shape = tuple([shape])
        elif not shape or len(shape) == 0:
            # TODO: Change this in order to let "is shape finalized" checks work
            self._shape = UNSET_SHAPE
        else:
            shapes = []
            for dim in shape:
                if isinstance(dim, (Node, Integral)):
                    shapes.append(dim)
                elif isinstance(dim, float):
                    shapes.append(int(dim))
                else:
                    raise TypeError(f"Shape value must be placeholder or integer "
                                    f"value for {self.name}\n"
                                    f"\tDim: {dim}"
                                    f"\n\t{self.kwargs} ")
            self._shape = tuple(shapes)

    def update_graph_key(self, old_key, new_key):
        n = list(map(lambda k: (new_key, self.nodes[k]) if k == old_key
                     else (k, self.nodes[k]), self.nodes.keys()))
        self.nodes = Graph(n)

    def insert_node(self, node, idx):
        node_list = list(self.nodes.items())
        node_list.insert(idx, (node.name, node))
        self.nodes = Graph(node_list)

    def find_node(self, name):
        g = self.graph
        while g is not None and name not in g.nodes:
            g = g.graph
        if name in g.nodes:
            return g.nodes[name]
        raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}")

    def func_hash(self):
        """
        This returns the functional hash of a particular node. The default
        hash returns an object id, whereas this function returns a hash of
        all attributes and subgraphs of a node.
        """
        return node_hash(self)

    def __enter__(self):
        Node._graph_stack.append(self)
        return self

    def __exit__(self, *args):
        assert self == Node._graph_stack.pop()

    def __repr__(self):
        return "<node '%s'>" % self.name

    def __len__(self):
        # TODO: Update this to check for a finalized shape
        if self.shape == UNSET_SHAPE:
            raise TypeError(f'`shape` must be specified explicitly for nodes {self}')
        return self.shape[0]

    def __iter__(self):
        num = len(self)
        for i in range(num):
            yield self[i]

    def __bool__(self):
        return True

    def __hash__(self):
        return id(self)

    def __eq__(self, other):
        return hash(self) == hash(other)
    def evaluate_dependencies(self, context, callback=None):
        """
        Evaluate the dependencies of this node and discard the values.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the nodes.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        """
        for node in self.dependencies:
            node.evaluate(context, callback)

    def evaluate(self, context, callback=None):
        """
        Evaluate the node given a context.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.

        Returns
        -------
        value : object
            Output of the node given the context.
        """
        # Evaluate all explicit dependencies first
        self.evaluate_dependencies(context, callback)
        if self in context:
            return context[self]
        # Evaluate the parents
        partial = functools.partial(self.evaluate_node, context=context,
                                    callback=callback)
        args = [partial(arg) for arg in self.args]
        kwargs = {key: partial(value) for key, value in self.kwargs.items()
                  if key not in self.added_attrs}
        # Evaluate the node
        callback = callback or _noop_callback
        with callback(self, context):
            context[self] = self.value = self._evaluate(*args, context=context,
                                                        **kwargs)
        return self.value

    def _evaluate(self, *args, context=None, **kwargs):
        """
        Inheriting nodes should implement this function to evaluate the node.
        """
        return self(*args, context, **kwargs)

    @classmethod
    def evaluate_node(cls, node, context, **kwargs):
        """
        Evaluate a node or constant given a context.
        """
        Node.evaluated_nodes += 1
        try:
            if isinstance(node, Node):
                return node.evaluate(context, **kwargs)
            partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
            if isinstance(node, tuple):
                return tuple(partial(element) for element in node)
            if isinstance(node, list):
                return [partial(element) for element in node]
            if isinstance(node, dict):
                return {partial(key): partial(value) for key, value in node.items()}
            if isinstance(node, slice):
                return slice(*[partial(getattr(node, attr))
                               for attr in ['start', 'stop', 'step']])
            return node
        except Exception as ex:  # pragma: no cover
            messages = []
            interactive = False
            if isinstance(node, Node):
                stack = []
                for frame in reversed(node._stack):  # pylint: disable=protected-access
                    # Do not capture any internal stack traces
                    fname = frame.filename
                    if 'polymath' in fname:
                        continue
                    # Stop tracing at the last interactive cell
                    if interactive and not fname.startswith('<'):
                        break
                    interactive = fname.startswith('<')
                    stack.append(frame)
                stack = "".join(traceback.format_list(reversed(stack)))
                messages.append("Failed to evaluate node `%s` defined at:\n\n%s"
                                % (node, stack))
            raise ex from EvaluationError("".join(messages))

    def instantiate_node(self, node):  # pylint:disable=W0621
        """
        Instantiate nodes by retrieving the node object associated with the
        node name.

        Parameters
        ----------
        node : Node or str
            Node instance or name of a node.

        Returns
        -------
        instantiated_node : Node
            Node instance.

        Raises
        ------
        ValueError
            If `node` is not an `Node` instance or a node name.
        RuntimeError
            If `node` is an `Node` instance but does not belong to this graph.
        """
        if isinstance(node, str):
            return self.nodes[node]
        if isinstance(node, Node):
            if node.name not in self.nodes and (node.graph != self):
                raise RuntimeError(f"node '{node}' does not belong to {self} "
                                   f"graph, instead belongs to {node.graph}")
            return node
        raise ValueError(f"'{node}' is not an `Node` instance or node name")

    def instantiate_graph(self, context, **kwargs):
        """
        Instantiate a graph by replacing all node names with node instances.

        .. note::
            This function modifies the context in place. Use
            :code:`context=context.copy()` to avoid the context being modified.

        Parameters
        ----------
        context : dict[Node or str, object]
            Context whose keys are node instances or names.
        kwargs : dict[str, object]
            Additional context information keyed by variable name.

        Returns
        -------
        normalized_context : dict[Node, object]
            Normalized context whose keys are node instances.

        Raises
        ------
        ValueError
            If the context specifies more than one value for any node.
        ValueError
            If `context` is not a mapping.
        """
        if context is None:
            context = {}
        elif not isinstance(context, Mapping):
            raise ValueError("`context` must be a mapping.")
        nodes = list(context)
        for node in nodes:  # pylint:disable=W0621
            value = context.pop(node)
            node = self.instantiate_node(node)
            if node in context:
                raise ValueError(f"duplicate unequal value for node '{node}'")
            context[node] = value
            if node.op_name in ["placeholder", "state", "input", "output",
                                "temp"] and not node.is_shape_finalized():
                context[node] = node.evaluate(context)
        # Add the keyword arguments
        for name, value in kwargs.items():
            node = self.nodes[name]
            if node in context:
                raise ValueError(f"duplicate value for node '{node}'")
            context[node] = value
            if node.op_name in ["placeholder", "state", "input", "output",
                                "temp"] and not node.is_shape_finalized():
                context[node] = node.evaluate(context)
        return context

    def run(self, fetches, context=None, *, callback=None, **kwargs):
        """
        Evaluate one or more nodes given a dictionary of node names with their
        values.

        .. note::
            This function modifies the context in place. Use
            :code:`context=context.copy()` to avoid the context being modified.

        Parameters
        ----------
        fetches : list[str or Node] or str or Node
            One or more `Node` instances or names to evaluate.
        context : dict or None
            Context in which to evaluate the nodes.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        kwargs : dict
            Additional context information keyed by variable name.

        Returns
        -------
        values : Node or tuple[object]
            Output of the nodes given the context.

        Raises
        ------
        ValueError
            If `fetches` is not an `Node` instance, node name, or a sequence
            thereof.
        """
        if isinstance(fetches, (str, Node)):
            fetches = [fetches]
            single = True
        elif isinstance(fetches, Sequence):
            single = False
        else:
            raise ValueError("`fetches` must be an `Node` instance, node name, "
                             "or a sequence thereof.")
        fetches = [self.instantiate_node(node) for node in fetches]
        context = self.instantiate_graph(context, **kwargs)
        for c in context:
            if c in fetches and c.op_name in ["output", "state", "temp"]:
                write_name = "/".join([f"{i}{c.write_count - 1}"
                                       for i in c.name.split("/")]) \
                    if c.write_count > 0 else c.name
                fetches[fetches.index(c)] = c.graph.nodes[write_name]
        values = [fetch.evaluate_node(fetch, context, callback=callback)
                  for fetch in fetches]
        return values[0] if single else tuple(values)

    def __call__(self, *args, **kwargs):
        return self.run(*args, **kwargs)

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, data):
        self.__dict__.update(data)

    def __getattr__(self, name):
        return getattr_(self, name, graph=self.graph)

    def __getitem__(self, key):
        if self.__class__.__name__ != "Node":
            if isinstance(key, (slice, Integral)):
                return getitem(self, key, graph=self.graph)
            if isinstance(key, (list)):
                return var_index(self, key, graph=self.graph)
            if isinstance(key, tuple):
                return var_index(self, list(key), graph=self.graph)
            return var_index(self, [key], graph=self.graph)
        if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
            return self
        if self.is_shape_finalized() and len(self.nodes) >= np.prod(self.shape):
            if isinstance(key, Integral):
                key = tuple([key])
            idx = np.ravel_multi_index(key, dims=self.shape, order='C')
            ret = self.nodes.item_by_index(idx)
            return ret
        if isinstance(key, str):
            if key not in self.nodes.keys():
                raise KeyError(f"{key} not in {self.name} keys:\n"
                               f"Node keys: {list(self.nodes.keys())}")
            return self.nodes[key]
        ...

    # Binary and comparison operators build deferred op nodes; when the other
    # operand is a slice_op/var_index/index node, the reflected method of that
    # node is used instead.
    def __add__(self, other):
        return add(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__radd__(self)

    def __radd__(self, other):
        return add(other, self, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__add__(self)

    def __sub__(self, other):
        return sub(self, other, graph=self.graph) if not _is_node_type_instance(
            other, ("slice_op", "var_index", "index")) else other.__rsub__(self)

    # ... __mul__, __truediv__, __floordiv__, __mod__, __pow__, __matmul__,
    # __lshift__, __rshift__, __and__, __or__, __xor__, their reflected
    # variants, and the comparisons (__lt__, __le__, __gt__, __ge__, __ne__)
    # all follow the same pattern, dispatching to the nodeop wrappers below.

    def __invert__(self):
        return inv(self, graph=self.graph)

    def __neg__(self):
        return neg(self, graph=self.graph)

    def __abs__(self):
        return abs_(self, graph=self.graph)

    def __pos__(self):
        return pos(self, graph=self.graph)

    def __reversed__(self):
        return reversed_(self, graph=self.graph)


class EvaluationError(RuntimeError):
    """
    Failed to evaluate a node.
    """
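# ---------------------------------------------------------------------------
# The evaluation contract above (memoize results into `context`, run
# `dependencies` first, recurse into `args`) can be hard to see through the
# plumbing. Below is a minimal standalone sketch of that pattern; `MiniNode`
# and its members are illustrative names, not part of this library.
# ---------------------------------------------------------------------------
class MiniNode:
    """Tiny stand-in for Node: a callable with lazily evaluated arguments."""
    def __init__(self, fn=None, *args, value=None):
        self.fn, self.args, self.value = fn, args, value

    def evaluate(self, context):
        if self in context:                      # memoized, like `context[self]`
            return context[self]
        if self.fn is None:                      # leaf: placeholder with default
            context[self] = self.value
        else:                                    # recurse into the parents
            vals = [a.evaluate(context) for a in self.args]
            context[self] = self.fn(*vals)
        return context[self]

_x = MiniNode(value=2)
_y = MiniNode(value=3)
_z = MiniNode(lambda a, b: a + b, _x, _y)
assert _z.evaluate({}) == 5                      # defaults used
assert _z.evaluate({_x: 10, _y: 1}) == 11        # context overrides, like run()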
class var_index(Node):  # pylint: disable=C0103,W0223
    """
    Node representing values of a variable corresponding to input index values.

    Parameters
    ----------
    var : Node
        The multi-dimensional variable used for indexing into.
    idx : tuple
        Tuple of either integer values or index/index_op nodes.
    """
    def __init__(self, var, idx, name=None, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        else:
            domain = Domain(idx)
        super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs)

    @property
    def domain(self):
        return self.kwargs["domain"]

    @property
    def var(self):
        var, index_list = self.args
        return var

    def set_name(self, name):
        """
        Set the name for a node using a unique string which corresponds to the
        variable, index combination.
        """
        name = []
        for key in self.args[1]:
            if isinstance(key, Node):
                name.append(key.name)
            elif hasattr(key, "__len__") and not isinstance(key, str):
                name.append(str(tuple(key)))
            else:
                name.append(key)
        name = self.var.name + "[" + "][".join(name) + "]"
        return super(var_index, self).set_name(name)

    def is_scalar(self, val=None):
        if val is not None and (not isinstance(val, np.ndarray) or
                                (len(val.shape) == 1 and val.shape[0] == 1)):
            if self.var.shape != DEFAULT_SHAPES[0] and \
                    (len(self.var.shape) > 1 and not isinstance(self.var.shape[0], Node)):
                raise ValueError(f"Invalid shape var for var index {self} "
                                 f"with variable shape {self.var.shape}")
            return True
        return False

    def _evaluate(self, var, indices, **kwargs):
        if self.is_scalar(var):
            out_shape = (1,)
            indices = (0,)
            single = True
        else:
            out_shape = self.domain.shape_from_indices(indices)
            indices = self.domain.compute_pairs()
            single = False
        if isinstance(var, (Integral, Real, str)):
            var = np.asarray([var])
        elif not isinstance(var, (np.ndarray, list)):
            raise TypeError(f"Variable {var} with type {type(var)} is not a "
                            f"list or numpy array, and cannot be sliced for "
                            f"{self.name}")
        elif isinstance(var, list):
            var = np.asarray(var)
        if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
            if len(out_shape) > len(var.shape):
                for i in range(len(out_shape)):
                    if out_shape[i] == 1:
                        var = np.expand_dims(var, axis=i)
            else:
                var = np.squeeze(var)
        if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
            raise ValueError(f"Index list does not match {var.shape} in "
                             f"{self.var.op_name} dimensions for slice "
                             f"{self.args[0].name} with {out_shape}.\n"
                             f"Domain: {self.domain}\n"
                             f"Eval Stack: {Node._eval_stack}")
        if not single and not all([(idx_val - 1) >= indices[-1][idx]
                                   for idx, idx_val in enumerate(var.shape)]):
            raise ValueError(f"var_index {self.name} has indices which are "
                             f"greater than the variable shape:\n"
                             f"\tArgs: {self.args}\n"
                             f"\tVar shape: {var.shape}\n"
                             f"\tNode shape: {self.var.shape}\n"
                             f"\tIndex Upper bounds: {indices[-1]}")
        indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x,
                           indices))
        res = var[indices] if single \
            else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
        if out_shape == (1,) and len(indices) == 1:
            res = res[0]
        self.domain.set_computed(out_shape, indices)
        return res

    def __getitem__(self, key):
        if isinstance(key, (list)):
            return var_index(self.var, key, graph=self)
        if isinstance(key, tuple):
            return var_index(self.var, key, graph=self)
        return var_index(self.var, tuple([key]), graph=self)

    def __repr__(self):
        return "<var_index name=%s, index=%s>" % (self.name, self.args)


class slice_op(Node):
    """
    Node representing multi-dimensional operations performed on a node.

    Parameters
    ----------
    target : callable
        The target operation applied over the input domains.
    """
    def __init__(self, target, *args, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        elif len(args) == 2:
            all_args = _flatten_iterable(args)
            slice1_var, slice1_idx, slice2_var, slice2_idx = \
                self.get_index_nodes(all_args[0], all_args[1])
            domain = slice1_idx.combine_set_domains(slice2_idx)
        if "op_name" in kwargs:
            kwargs.pop("op_name")
        target_name = f"{target.__module__}.{target.__name__}"
        super(slice_op, self).__init__(*args, target=target_name, domain=domain,
                                       op_name=f"slice_{target.__name__}", **kwargs)
        self.target = target

    @property
    def domain(self):
        return self.kwargs["domain"]

    def set_shape(self, shape=None, init=False):
        s = []
        assert isinstance(shape, (tuple, list))
        if len(shape) == np.product(shape) and len(shape) > 0:
            self._shape = shape if isinstance(shape, tuple) else tuple(shape)
        else:
            for idx, d in enumerate(self.domain.dom_set):
                if shape and isinstance(shape[idx], (func_op, Integral)):
                    s.append(shape[idx])
                elif shape and isinstance(shape[idx], float):
                    s.append(int(shape[idx]))
                else:
                    ...
            self._shape = tuple(s)

    def is_scalar(self, val):
        return not isinstance(val, np.ndarray) or \
               (len(val.shape) == 1 and val.shape[0] == 1)

    def _evaluate(self, op1, op2, context=None, **kwargs):
        if self.is_scalar(op1) or self.is_scalar(op2):
            value = self.target(op1, op2)
        else:
            arg0_dom = self.args[0].domain
            arg1_dom = self.args[1].domain
            op1_idx = self.domain.map_sub_domain(arg0_dom) \
                if isinstance(self.args[0], Node) else tuple([])
            op2_idx = self.domain.map_sub_domain(arg1_dom) \
                if isinstance(self.args[1], Node) else tuple([])
            op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(
                self.domain.computed_shape)
            op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(
                self.domain.computed_shape)
            value = self.target(op1, op2)
        return value

    def get_index_nodes(self, slice1_var=None, slice2_var=None):
        if slice1_var is None and slice2_var is None:
            slice1_var, slice2_var = self.args
        if isinstance(slice1_var, (slice_op, var_index)) or \
                _is_node_type_instance(slice1_var, "GroupNode"):
            slice1_idx = slice1_var.domain
        elif _is_node_type_instance(slice1_var, "index"):
            slice1_idx = slice1_var.domain
        else:
            slice1_idx = Domain(tuple([]))
        if isinstance(slice2_var, (slice_op, var_index)) or \
                _is_node_type_instance(slice2_var, "GroupNode"):
            slice2_idx = slice2_var.domain
        elif _is_node_type_instance(slice2_var, "index"):
            slice2_idx = slice2_var.domain
        else:
            slice2_idx = Domain(tuple([]))
        return slice1_var, slice1_idx, slice2_var, slice2_idx

    # Arithmetic on slice nodes stays in the slice domain: each operator
    # returns another slice_op over the combined domains.
    def __add__(self, other):
        return slice_op(operator.add, self, other, graph=self.graph)

    def __radd__(self, other):
        return slice_op(operator.add, other, self, graph=self.graph)

    def __sub__(self, other):
        return slice_op(operator.sub, self, other, graph=self.graph)

    def __rsub__(self, other):
        return slice_op(operator.sub, other, self, graph=self.graph)

    def __mul__(self, other):
        return slice_op(operator.mul, self, other, graph=self.graph)

    def __rmul__(self, other):
        return slice_op(operator.mul, other, self, graph=self.graph)

    # ... __truediv__, __floordiv__, __mod__, __pow__, the shift and bitwise
    # operators, and the comparisons (__lt__, __le__, __ne__, __gt__, __ge__)
    # follow the same pattern with the corresponding `operator` function ...

    def __repr__(self):
        return "<slice_%s '%s'>" % (self.target.__name__, self.name)


class func_op(Node):  # pylint: disable=C0103,R0903
    """
    Node wrapper for applying a function to the node arguments.

    Parameters
    ----------
    target : callable
        The target function applied to the node arguments.
    args : tuple
        Positional arguments passed to the target.
    kwargs : dict
        Keyword arguments passed to the target.
    """
    def __init__(self, target, *args, **kwargs):
        kwargs["op_name"] = kwargs["op_name"] if "op_name" in kwargs \
            else f"{target.__name__}"
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        else:
            domain = None
        super(func_op, self).__init__(
            *args, target=f"{target.__module__}.{target.__name__}",
            domain=domain, **kwargs)
        self.target = target
        self.added_attrs += ["domain", "target"]

    @property
    def target(self):
        return self._target

    @target.setter
    def target(self, fnc):
        self._target = fnc
        self.op_name = f"{fnc.__name__}"
        self.kwargs["target"] = f"{fnc.__module__}.{fnc.__name__}"

    def __getitem__(self, key):
        return self

    def __call__(self, *args, **kwargs):
        return call(self, *args, **kwargs)

    def _evaluate(self, *args, context=None, **kwargs):
        return self.target(*args, **kwargs)

    def __repr__(self):
        return "<func_op '%s' target=%s args=<%d items>>" % \
               (self.name, self.kwargs["target"], len(self.args))
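# ---------------------------------------------------------------------------
# Why define `__add__`/`__mul__` to return nodes? The sketch below shows how
# operator overloading turns an ordinary expression into a deferred graph
# that only computes when explicitly evaluated -- the same trick Node,
# slice_op, and func_op use above. `Lazy` and `const` are illustrative names,
# not library API.
# ---------------------------------------------------------------------------
class Lazy:
    def __init__(self, fn, *args):
        self.fn, self.args = fn, args

    def __add__(self, other):
        return Lazy(operator.add, self, other)   # build a node, do not add yet

    def __mul__(self, other):
        return Lazy(operator.mul, self, other)

    def eval(self):
        vals = [a.eval() if isinstance(a, Lazy) else a for a in self.args]
        return self.fn(*vals)

def const(v):
    return Lazy(lambda x: x, v)

_expr = const(2) + const(3) * const(4)            # builds three op nodes
assert _expr.eval() == 14                         # nothing ran until here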
kwargs : dict Mapping", "returns a hash of all attributes and subgraphs of a", "var_index(self, key, name=name, graph=self.graph) elif isinstance(key, tuple): return var_index(self, list(key),", "add_attribute(self, key, value): self.added_attrs.append(key) self.kwargs[key] = value def is_shape_finalized(self): if", "arguments if target is None: return functools.partial(nodeop, **kwargs) # This", "nodeop(operator.inv) invert = nodeop(operator.invert) ior = nodeop(operator.ior) ipow = nodeop(operator.ipow)", "return id(self) def func_hash(self): \"\"\" This returns the functional hash", "def __init__(self, target, *args, **kwargs): if \"domain\" in kwargs: domain", "Node) else tuple([]) op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else", "node and discard the values. Parameters ---------- context : dict", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rrshift__(self) def __rrshift__(self, other): return", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rsub__(self) def", "in node.items()} if isinstance(node, slice): return slice(*[partial(getattr(node, attr)) for attr", "g = self.graph while g is not None and name", "**kwargs) class EvaluationError(RuntimeError): \"\"\" Failed to evaluate an node. \"\"\"", "Returns ------- self : Node This node. Raises ------ ValueError", "self.dependencies.extend(self.graph.dependencies) # Choose a name for the node and add", "tuple(kwargs.pop(\"domain\")) if isinstance(kwargs[\"domain\"], list) else kwargs.pop(\"domain\") else: domain = Domain(idx)", "other): return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "discard the values. Parameters ---------- context : dict Normalised context", "insert_node(self, node, idx): node_list = list(self.nodes.items()) node_list.insert(idx, (node.name, node)) self.nodes", "(int, Node)): key = tuple([key]) if len(key) != len(self.shape): raise", "op2_idx))).reshape(self.domain.computed_shape) value = self.target(op1, op2) return value def get_index_nodes(self, slice1_var=None,", "@shape.setter def shape(self, shape): self.set_shape(shape, init=True) @graph.setter def graph(self, graph):", "name. Parameters ---------- node : Node or str Node instance", "target \"\"\" def __init__(self, target, *args, **kwargs): kwargs[\"op_name\"] = kwargs[\"op_name\"]", "Exception as ex: # pragma: no cover messages = []", "(\"slice_op\", \"var_index\", \"index\")) else other.__rpow__(self) def __rpow__(self, other): return pow_(other,", "context, callback=callback) for fetch in fetches] return values[0] if single", "== UNSET_SHAPE: raise TypeError(f'`shape` must be specified explicitly for nodes", "\"index\")) else other.__lt__(self) def __ge__(self, other): return ge(self, other, graph=self.graph)", "`func`. \"\"\" return func(*args, **kwargs) @contextlib.contextmanager def control_dependencies(dependencies, graph=None): \"\"\"", "self, graph=self.graph) def __truediv__(self, other): return slice_op(operator.truediv, self, other, graph=self.graph)", "tuple): ret = var_index(self.var, key, graph=self) else: ret = var_index(self.var,", "slice_op(builtins.pow, other, self, graph=self.graph) def __mul__(self, other): return slice_op(operator.mul, self,", "If no `Graph` instance can be obtained. 
\"\"\" graph =", "> 0: self._shape = shape if isinstance(shape, tuple) else tuple(shape)", "Upper bounds: {indices[-1]}\") indices = list(map(lambda x: x.tolist() if isinstance(x,", "is not an `Node` instance or node name\") def instantiate_graph(self,", "other): return slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self): return \"<slice_%s", "= nodeop(builtins.pow) super_ = nodeop(builtins.super) bytes_ = nodeop(builtins.bytes) float_ =", "get_index_nodes(self, slice1_var=None, slice2_var=None): if slice1_var is None and slice2_var is", "this node. If graph is `None`, this is the top-level", "\"var_index\", \"index\")) else other.__and__(self) def __or__(self, other): return or_(self, other,", "be placeholder or integer value for {self.name}\\n\" f\"\\tDim: {dim}\" f\"\\n\\t{self.kwargs}", "__xor__(self, other): return xor(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "`Node` instances or names to evaluate. context : dict or", "next_ = nodeop(builtins.next) slice_ = nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_", "(\"slice_op\", \"var_index\", \"index\")) else other.__floordiv__(self) def __mod__(self, other): return mod(self,", ": Unique name of the node\"\"\" return self._name @property def", "to use a random, unique identifier. shape : tuple or", "graph=self.graph) else: if isinstance(key, (list)): return var_index(self, key, graph=self) elif", "stringwhich corresponds to the variable, index combination. Parameters ---------- value", "context=None, **kwargs): \"\"\" Inheriting nodes should implement this function to", "Evaluate the parents partial = functools.partial(self.evaluate_node, context=context, callback=callback) args =", "raise TypeError(f\"Variable {var} with type {type(var)} is not a list", "Stack: {Node._eval_stack}\") if not single and not all([(idx_val - 1)", ": Node or tuple[object] Output of the nodes given the", "not isinstance(s, Integral): return False return True def set_shape(self, shape=None,", "other.__rlshift__(self) def __rlshift__(self, other): return lshift(other, self, graph=self.graph) if not", "\"index\")) else other.__gt__(self) def __le__(self, other): return le(self, other, graph=self.graph)", "open_ = nodeop(builtins.open) str_ = nodeop(builtins.str) bool_ = nodeop(builtins.bool) exec_", "class for nodes. Parameters ---------- args : tuple Positional arguments", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__lt__(self)", "return var def set_name(self, name): \"\"\" Set the name for", "tuple): return tuple(partial(element) for element in node) if isinstance(node, list):", "be a tuple of integers or parameter node names. graph", "this node and discard the values. 
    def __init__(self, *args, name=None, shape=None, graph=None,
                 dependencies=None, op_name=None, value=None, **kwargs):
        self.nodes = Graph()
        self.value = value
        self._args = []
        self._predecessors = []
        self._succesors = []
        self.args = args
        if "name" in kwargs:
            kwargs.pop("name")
        self.added_attrs = []
        # TODO: Change this to an underscore-prefixed private variable
        self.kwargs = kwargs
        self.graph = graph
        self._shape = OrderedDict()
        self.shape = shape or tuple([])

        # Get a list of all dependencies relevant to this node
        self.dependencies = [] if dependencies is None else dependencies
        if self.graph:
            self.dependencies.extend(self.graph.dependencies)

        # Choose a name for the node and add the node to the graph
        self._name = None
        self.name = name or uuid.uuid4().hex
        if op_name is not None:
            self._op_name = op_name
        elif self.__class__.__name__ == "Node":
            self._op_name = self.name
        else:
            self._op_name = self.__class__.__name__

        # Get the stack context so we can report where the node was defined
        self._stack = traceback.extract_stack(limit=1)

    def __enter__(self):
        Node._graph_stack.append(self)
        return self

    def __exit__(self, *args):
        assert self == Node._graph_stack.pop()

    def __bool__(self):
        return True

    def __hash__(self):
        return id(self)

    @property
    def graph(self):
        """
        polymath.srdfg.graph.Graph : Parent graph of this node. If graph is
        `None`, this is the top-level graph.
        """
        return self._graph

    @property
    def name(self):
        """str : Unique name of the node"""
        return self._name

    @property
    def op_name(self):
        """
        str : Operation name which describes the node functionality.
        """
        return self._op_name

    @property
    def args(self):
        """
        tuple : Positional arguments which are used for executing this node.
        """
        return tuple(self._args)

    @property
    def argnames(self):
        return [a.name if isinstance(a, Node) else a for a in self.args]

    @property
    def shape(self):
        """
        tuple : Shape of the output for a node. This can be a tuple of
        integers or parameter node names.
        """
        return self._shape

    @property
    def domain(self):
        return Domain(tuple([]))

    @property
    def gname(self):
        scope_names = [self.name]
        cgraph = self.graph
        while cgraph:
            scope_names.append(cgraph.name)
            cgraph = cgraph.graph
        return "/".join(list(reversed(scope_names)))

    @name.setter
    def name(self, name):
        self.set_name(name)

    @op_name.setter
    def op_name(self, op_name):
        self._op_name = op_name

    @args.setter
    def args(self, args):
        new_args = []
        for arg in args:
            if isinstance(arg, Node):
                if self.__class__.__name__ == "Node":
                    self.nodes[arg.name] = self.graph[arg.name]
            new_args.append(arg)
        self._args = tuple(new_args)

    @shape.setter
    def shape(self, shape):
        self.set_shape(shape, init=True)

    @graph.setter
    def graph(self, graph):
        self._graph = Node.get_active_graph(graph)

    def preds(self):
        return self._predecessors

    def succs(self):
        return self._succesors

    def add_predecessor(self, pred):
        if isinstance(pred, Node):
            self._predecessors.append(pred.gname)
        else:
            self._predecessors.append(pred)

    def add_successor(self, succ):
        if isinstance(succ, Node):
            self._succesors.append(succ.gname)
        else:
            self._succesors.append(succ)

    def set_edges(self):
        for e in self.args:
            self.add_predecessor(e)
            if isinstance(e, Node):
                e.add_successor(self)
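    # Example (sketch, not part of the original module): entering a node as a
    # context manager pushes it onto the class-level graph stack, so nodes
    # created inside the block attach to it; `gname` then reports the scoped
    # path. Names here are illustrative.
    #
    #     with Node(name="outer") as outer:
    #         inner = Node(name="inner")
    #     # inner.gname == "outer/inner"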
    def evaluate_dependencies(self, context, callback=None):
        """
        Evaluate the dependencies of this node and discard the values.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        """
        for dependency in self.dependencies:
            self.evaluate_node(dependency, context, callback=callback)

    def evaluate(self, context, callback=None):
        """
        Evaluate the node given a context.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.

        Returns
        -------
        value : object
            Output of the node given the context.
        """
        # Evaluate all explicit dependencies first
        self.evaluate_dependencies(context, callback)

        if self in context:
            return context[self]

        # Evaluate the parents
        partial = functools.partial(self.evaluate_node, context=context,
                                    callback=callback)
        args = [partial(arg) for arg in self.args]
        kwargs = {key: partial(value) for key, value in self.kwargs.items()
                  if key not in self.added_attrs}

        # Evaluate the node
        callback = callback or _noop_callback
        with callback(self, context):
            if self.__class__.__name__ == "Node":
                context[self] = self.value = self._evaluate(*args, context=context, **kwargs)
            else:
                context[self] = self.value = self._evaluate(*args, **kwargs)
        return self.value

    def _evaluate(self, *args, context=None, **kwargs):
        """
        Inheriting nodes should implement this function to evaluate the node.
        """
        return self(*args, context, **kwargs)

    @classmethod
    def evaluate_node(cls, node, context, **kwargs):
        """
        Evaluate a node or constant given a context.
        """
        try:
            if isinstance(node, Node):
                Node.evaluated_nodes += 1
                return node.evaluate(context, **kwargs)
            partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
            if isinstance(node, tuple):
                return tuple(partial(element) for element in node)
            if isinstance(node, list):
                return [partial(element) for element in node]
            if isinstance(node, dict):
                return {partial(key): partial(value) for key, value in node.items()}
            if isinstance(node, slice):
                return slice(*[partial(getattr(node, attr))
                               for attr in ['start', 'stop', 'step']])
            return node
        except Exception as ex:  # pragma: no cover
            messages = []
            interactive = False
            if isinstance(node, Node) or not is_iterable(node):
                node = [node]
            for n in node:
                stack = []
                if isinstance(n, Node):
                    for frame in reversed(n._stack):  # pylint: disable=protected-access
                        # Do not capture any internal stack traces
                        fname = frame.filename
                        if 'polymath' in fname:
                            continue
                        # Stop tracing at the last interactive cell
                        if interactive and not fname.startswith('<'):
                            break  # pragma: no cover
                        interactive = fname.startswith('<')
                        stack.append(frame)
                stack = "".join(traceback.format_list(reversed(stack)))
                message = "Failed to evaluate node `%s` defined at:\n\n%s" % (n, stack)
                messages.append(message)
            raise ex from EvaluationError("".join(messages))
\"\"\" # Evaluate all explicit", "\"var_index\", \"index\")) else other.__lt__(self) def __ge__(self, other): return ge(self, other,", "ex: # pragma: no cover messages = [] interactive =", "self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self, var, indices, **kwargs): if self.is_scalar(var):", "= False if isinstance(var, (Integral, Real, str)): var = np.asarray([var])", "self.graph.nodes: return self.graph.nodes[name] elif isinstance(key, (list)): return var_index(self, key, name=name,", "def evaluate_node(cls, node, context, **kwargs): \"\"\" Evaluate an node or", "matmul = nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul", "\"is shape finalized\" to work self._shape = UNSET_SHAPE else: shapes", "node: stack = [] if isinstance(n, Node): for frame in", "instance, node name, or a sequence thereof. \"\"\" if isinstance(fetches,", "other.__rsub__(self) def __rsub__(self, other): return sub(other, self, graph=self.graph) if not", "dict[Node or str, object] Context whose keys are node instances", "name of the node. Returns ------- self : Node This", "in the associated graph. KeyError If the current name of", "name in self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\\n\\t\" f\"Existing:", "if self.graph: self.dependencies.extend(self.graph.dependencies) # Choose a name for the node", "key, name=name, graph=self.graph) elif isinstance(key, tuple): return var_index(self, list(key), name=name,", "Real, str)): var = np.asarray([var]) elif not isinstance(var, (np.ndarray, list)):", "evaluate_dependencies(self, context, callback=None): \"\"\" Evaluate the dependencies of this node", "# Evaluate the parents partial = functools.partial(self.evaluate_node, context=context, callback=callback) args", "for slice {self.args[0].name} with {out_shape}.\\n\" f\"Domain: {self.domain}\\n\" f\"Eval Stack: {Node._eval_stack}\")", "init=False): if isinstance(shape, float): self._shape = tuple([np.int(shape)]) elif isinstance(shape, Integral):", "must be specified explicitly for nodes {self}') return self.shape[0] def", "disable=C0103 abs_ = nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ = nodeop(builtins.help)", "called when the decorator is used without arguments @functools.wraps(target) def", "of all attributes and subgraphs of a node. 
\"\"\" return", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__radd__(self)", "def __or__(self, other): return or_(self, other, graph=self.graph) if not _is_node_type_instance(other,", "check if the existing node is not equal to the", "new_args = [] for arg in args: if isinstance(arg, Node):", "domain(self): return self.kwargs[\"domain\"] @property def var(self): var, index_list = self.args", "else other.__rpow__(self) def __matmul__(self, other): return matmul(self, other, graph=self.graph) def", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__sub__(self) def", "in fetches] return values[0] if single else tuple(values) def __getstate__(self):", "self, other, graph=self.graph) def __rlshift__(self, other): return slice_op(operator.lshift, other, self,", "def __radd__(self, other): return add(other, self, graph=self.graph) if not _is_node_type_instance(other,", "Raises ------ ValueError If the context specifies more than one", "from .util import _noop_callback, _flatten_iterable, node_hash, \\ _is_node_type_instance, is_iterable class", "[] for dim in shape: if isinstance(dim, (Node, Integral)): shapes.append(dim)", "= nodeop(operator.le) length_hint = nodeop(operator.length_hint) lshift = nodeop(operator.lshift) lt =", "+ \"[\" + \"][\".join(name) + \"]\" if name in self.graph.nodes:", "return slice1_var, slice1_idx, slice2_var, slice2_idx def _evaluate(self, *args, **kwargs): for", "complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr) max_ = nodeop(builtins.max) round_", "a node has a default value to use for execution,", "def shape(self): \"\"\" tuple : Shape of the output for", "vars_ = nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_", "replacing all node names with node instances. .. note:: This", "as ewll if self.graph and name in self.graph.nodes: raise ValueError(f\"duplicate", "Sequence, deque import functools from numbers import Integral, Rational, Real", "self.nodes[k]) if k == old_key else (k, self.nodes[k]), self.nodes.keys())) self.nodes", "other): return slice_op(operator.xor, other, self, graph=self.graph) def __lt__(self, other): return", "import UNSET_SHAPE, DEFAULT_SHAPES import builtins import operator from collections import", "This can be a tuple of integers or parameter node", "def get_index_nodes(self, slice1_var=None, slice2_var=None): if slice1_var is None and slice2_var", "fetches] context = self.instantiate_graph(context, **kwargs) for c in context: if", "or _is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain elif _is_node_type_instance(slice1_var, \"index\"): slice1_idx", "if isinstance(node, Node): if node.name not in self.nodes and (node.graph", "return lshift(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "= shape if isinstance(shape, tuple) else tuple(shape) else: for idx,", "name of the node and update the graph. Parameters ----------", "node.items()} if isinstance(node, slice): return slice(*[partial(getattr(node, attr)) for attr in", "their values. .. 
note:: This function modifies the context in", "in g.nodes: return g.nodes[name] raise RuntimeError(f\"Cannot find {name} in graph", "set_edges(self): for e in self.args: self.add_predecessor(e) if isinstance(e, Node): e.add_successor(self)", "x: x.tolist() if isinstance(x, np.ndarray) else x, indices)) res =", "in self.args] @property def shape(self): \"\"\" tuple : Shape of", "domain = Domain(idx) super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs) @property", "\"\"\" polymath.srdfg.graph.Graph : Parent graph of this node. If graph", "op_name=op_name, dependencies=dependencies, value=value, **kwargs) return n def __bool__(self): return True", "other): return slice_op(operator.rshift, self, other, graph=self.graph) def __rrshift__(self, other): return", "value def get_index_nodes(self, slice1_var=None, slice2_var=None): if slice1_var is None and", "x, indices)) res = var[indices] if single else np.asarray([var[idx] for", "than one value for any node. ValueError If `context` is", "self.evaluate_dependencies(context, callback) if self in context: return context[self] # Evaluate", "other): return floordiv(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "isinstance(key, Integral): key = tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C')", "*args, **kwargs): return self.run(*args, **kwargs) class EvaluationError(RuntimeError): \"\"\" Failed to", "[self.instantiate_node(node) for node in fetches] context = self.instantiate_graph(context, **kwargs) for", "context: if c in fetches and c.op_name in [\"output\", \"state\",", "kwargs \\ else f\"{target.__name__}\" if \"domain\" in kwargs: domain =", "'{node}' does not belong to {self} graph, instead belongs to\"", "(\"slice_op\", \"var_index\", \"index\")) else other.__rpow__(self) def __matmul__(self, other): return matmul(self,", "if node.op_name in [\"placeholder\", \"state\", \"input\", \"output\", \"temp\"] and not", "\"\"\" Instantiate nodes by retrieving the node object associated with", "__rmul__(self, other): return slice_op(operator.mul, other, self, graph=self.graph) def __truediv__(self, other):", "nodeop(builtins.all) dir_ = nodeop(builtins.dir) hex_ = nodeop(builtins.hex) next_ = nodeop(builtins.next)", "def __gt__(self, other): return slice_op(operator.gt, self, other, graph=self.graph) def __ge__(self,", "name=name, graph=self.graph) elif isinstance(key, tuple): return var_index(self, list(key), name=name, graph=self.graph)", "return self._target @target.setter def target(self, fnc): self._target = fnc self.op_name", "{self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\") if self.graph: graph = self.graph if self._name", "= nodeop(builtins.len) property_ = nodeop(builtins.property) type_ = nodeop(builtins.type) chr_ =", "= c.graph.nodes[write_name] values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rpow__(self) def", "other): return add(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "key): return self @property def domain(self): return self.kwargs[\"domain\"] def get_index_nodes(self,", "Name of the node or `None` to use a random,", "Node.get_active_graph(graph) @property def gname(self): scope_names = [self.name] cgraph = self.graph", "\"var_index\", \"index\")) else other.__le__(self) def __invert__(self): return inv(self, 
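    # Usage sketch (illustrative, not from the original module): `run` is the
    # public entry point, also reachable through `__call__` below. Subclasses
    # that override `_evaluate` (e.g. `func_op` further down) do the actual
    # computing; "x" and "y" here name such nodes.
    #
    #     result = graph.run("x", {"some_input": 3})
    #     results = graph.run(["x", "y"], {"some_input": 3})  # tuple of values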
    def set_name(self, name):
        """
        Set the name of the node and update the graph.

        Parameters
        ----------
        value : str
            Unique name of the node.

        Returns
        -------
        self : Node
            This node.

        Raises
        ------
        ValueError
            If a node with `value` already exists in the associated graph.
        KeyError
            If the current name of the node cannot be found in the
            associated graph.
        """
        name = name or uuid.uuid4().hex
        # TODO: Need a way to check if the existing node is not equal to the
        # current node as well
        if self.graph and name in self.graph.nodes:
            raise ValueError(f"duplicate name '{name}' in {self.graph.name}:\n\t"
                             f"Existing: {self.graph.nodes[name].args}\n\t"
                             f"New: {self.args}")
        if self.graph:
            graph = self.graph
            if self._name is not None and self._name in graph.nodes:
                graph.update_graph_key(self._name, name)
            else:
                graph.nodes[name] = self
        self._name = name
        return self

    def set_shape(self, shape=None, init=False):
        if isinstance(shape, float):
            self._shape = tuple([int(shape)])
        elif isinstance(shape, Integral):
            self._shape = tuple([shape])
        elif isinstance(shape, Node):
            self._shape = tuple([shape])
        elif not shape:
            # TODO: Change this in order to enable "is shape finalized" to work
            self._shape = UNSET_SHAPE
        else:
            shapes = []
            for dim in shape:
                if isinstance(dim, (Node, Integral)):
                    shapes.append(dim)
                elif isinstance(dim, float):
                    shapes.append(int(dim))
                else:
                    raise TypeError(f"Shape value must be placeholder or integer value for {self.name}\n"
                                    f"\tDim: {dim}"
                                    f"\n\t{self.kwargs} ")
            self._shape = tuple(shapes)

    def is_shape_finalized(self):
        if self.shape == UNSET_SHAPE:
            return False
        for s in self.shape:
            if not isinstance(s, Integral):
                return False
        return True

    def find_node(self, name):
        g = self.graph
        while g is not None and name not in g.nodes:
            g = g.graph
        if g is not None and name in g.nodes:
            return g.nodes[name]
        raise RuntimeError(f"Cannot find {name} in graph nodes. Graph: {self.graph}")

    def func_hash(self):
        """
        This returns the functional hash of a particular node. The default
        hash returns an object id, whereas this function returns a hash of
        all attributes and subgraphs of a node.
        """
        return node_hash(self)

    def add_attribute(self, key, value):
        self.added_attrs.append(key)
        self.kwargs[key] = value

    @staticmethod
    def get_active_graph(graph=None):
        """
        Obtain the currently active graph instance by returning the
        explicitly given graph or using the default graph.

        Parameters
        ----------
        graph : Node or None
            Graph to return or `None` to use the default graph.

        Raises
        ------
        ValueError
            If no `Graph` instance can be obtained.
        """
        graph = graph or Node._graph_stack[-1]
        return graph

    def __getattr__(self, name):
        return getattr_(self, name, graph=self.graph)

    def __getitem__(self, key):
        if isinstance(key, str) and key in self.nodes:
            return self.nodes[key]
        elif isinstance(key, (slice, Integral)):
            return getitem(self, key, graph=self.graph)
        elif isinstance(key, list):
            return var_index(self, key, graph=self.graph)
        elif isinstance(key, tuple):
            return var_index(self, list(key), graph=self.graph)
        else:
            return var_index(self, [key], graph=self.graph)

    def __len__(self):
        # TODO: Update this to check for finalized shape
        if self.shape == UNSET_SHAPE:
            raise TypeError(f'`shape` must be specified explicitly for nodes {self}')
        return self.shape[0]

    def __iter__(self):
        num = len(self)
        for i in range(num):
            yield self[i]

    def __eq__(self, other):
        return hash(self) == hash(other)

    def __repr__(self):
        return "<node '%s'>" % self.name

    def __add__(self, other):
        return add(self, other, graph=self.graph) \
            if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) \
            else other.__radd__(self)

    def __radd__(self, other):
        return add(other, self, graph=self.graph) \
            if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) \
            else other.__add__(self)

    def __sub__(self, other):
        return sub(self, other, graph=self.graph) \
            if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) \
            else other.__rsub__(self)

    def __mul__(self, other):
        return mul(self, other, graph=self.graph) \
            if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) \
            else other.__rmul__(self)

    # The remaining binary, comparison, and unary dunders (truediv, floordiv,
    # mod, pow, matmul, lshift, rshift, and, or, xor, lt, le, gt, ge, ne, neg,
    # pos, abs, invert) follow the same pattern, dispatching to the matching
    # nodeop wrapper defined at the bottom of this module.

    def __call__(self, *args, **kwargs):
        return self.run(*args, **kwargs)
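    # Example (sketch): the dunders above record computation instead of
    # performing it, so an ordinary expression over nodes extends the graph.
    # `a` and `b` stand for any two nodes created under the active graph.
    #
    #     c = a + b      # builds a func_op wrapping operator.add
    #     d = c * 2      # constants are passed through as plain arguments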
\"\"\" return", "\"var_index\", \"index\")) else other.__rxor__(self) def __rxor__(self, other): return xor(other, self,", "self.target(*args, **kwargs) def __call__(self, *args, **kwargs): return call(self, *args, **kwargs)", "min_ = nodeop(builtins.min) setattr_ = nodeop(builtins.setattr) all_ = nodeop(builtins.all) dir_", "def _evaluate(self, *args, context=None, **kwargs): \"\"\" Inheriting nodes should implement", "nodeop(operator.rshift) setitem = nodeop(operator.setitem) sub = nodeop(operator.sub) truediv = nodeop(operator.truediv)", "def target(self, fnc): self._target = fnc self.op_name = f\"{fnc.__name__}\" self.kwargs[\"target\"]", "add = nodeop(operator.add) and_ = nodeop(operator.and_) attrgetter = nodeop(operator.attrgetter) concat", "True else: return self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self, var, indices,", "return slice1_var, slice1_idx, slice2_var, slice2_idx def __add__(self, other): return slice_op(operator.add,", "@target.setter def target(self, fnc): self._target = fnc self.op_name = f\"{fnc.__name__}\"", "node is executed. args : list Sequence of positional arguments", "of the node\"\"\" return self._name @property def op_name(self): \"\"\" str", "UNSET_SHAPE: return False for s in self.shape: if not isinstance(s,", ": callable or None Callback to be evaluated when an", "var : Node The multi-dimensional variable used for indexing into.", "list(context) # Add the keyword arguments for node in nodes:", "return var_index(self, [key], name=name, graph=self.graph) def set_shape(self, shape=None, init=False): s", "callback) def evaluate(self, context, callback=None): \"\"\" Evaluate the node given", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__or__(self) def __xor__(self,", "tuple(self._args) @property def argnames(self): return [a.name if isinstance(a, Node) else", "res[0] self.domain.set_computed(out_shape, indices) return res def __add__(self, other): return slice_op(operator.add,", "or Node One or more `Node` instances or names to", "assert isinstance(shape, (tuple, list)) if all([isinstance(sv, Integral) for sv in", "round_ = nodeop(builtins.round) delattr_ = nodeop(builtins.delattr) hash_ = nodeop(builtins.hash) memoryview_", "keys are node instances or names. kwargs : dict[str, object]", "raise ValueError(\"`context` must be a mapping.\") nodes = list(context) #", "or None Shape of the output for a node. This", "Need a way to check if the existing node is", "self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape) == 1 and not isinstance(self.var.shape[0],Node)):", "slice2_var.domain else: slice2_idx = Domain(tuple([])) return slice1_var, slice1_idx, slice2_var, slice2_idx", "when the decorator is used without arguments @functools.wraps(target) def _wrapper(*args,", "\"\"\" def __init__(self, var, idx, name=None, **kwargs): # pylint: disable=W0235", "def __rxor__(self, other): return xor(other, self, graph=self.graph) if not _is_node_type_instance(other,", "hash of all attributes and subgraphs of a node. \"\"\"", "given the context. \"\"\" # Evaluate all explicit dependencies first", "is the top-level graph. 
\"\"\" return self._graph def preds(self): return", "graph=self.graph) elif isinstance(key, tuple): return var_index(self, list(key), name=name, graph=self.graph) else:", "self._name and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] =", "None: return functools.partial(nodeop, **kwargs) # This is called when the", "s.append(int(shape[idx])) elif isinstance(d, float): s.append(int(d)) elif isinstance(d, var_index): s.append(d.domain) else:", "stack traces fname = frame.filename if 'polymath' in fname: continue", "other.__or__(self) def __xor__(self, other): return xor(self, other, graph=self.graph) if not", "args(self): \"\"\" tuple : Positional arguments which are used for", "kwargs self.graph = graph self._shape = OrderedDict() self.shape = shape", "return add(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "default graph. Raises ------ ValueError If no `Graph` instance can", "or_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "does not belong to this graph. \"\"\" if isinstance(node, str):", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__le__(self) def __invert__(self): return", "self self._name = name return self def evaluate_dependencies(self, context, callback=None):", "to this graph. \"\"\" if isinstance(node, str): return self.nodes[node] if", "\"\"\" Ensure that all `dependencies` are executed before any nodes", "not None and self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name]", "or (len(val.shape) == 1 and val.shape[0] == 1)): if self.var.shape", "def __lshift__(self, other): return slice_op(operator.lshift, self, other, graph=self.graph) def __rlshift__(self,", "stack.append(frame) stack = \"\".join(traceback.format_list(reversed(stack))) message = \"Failed to evaluate node", "self.nodes[node] if isinstance(node, Node): if node.name not in self.nodes and", "return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "def set_name(self, name): \"\"\" Set the name for a variable", "else: return self.nodes[key] def __add__(self, other): return add(self, other, graph=self.graph)", "\"\"\" Evaluate one or more nodes given a dictionary of", "graph.nodes[name] = self self._name = name return self def __getitem__(self,", "return self.run(*args, **kwargs) class EvaluationError(RuntimeError): \"\"\" Failed to evaluate an", "[] for arg in args: if isinstance(arg, Node): if self.__class__.__name__", "if 'polymath' in fname: continue # Stop tracing at the", "nodeop(builtins.format) len_ = nodeop(builtins.len) property_ = nodeop(builtins.property) type_ = nodeop(builtins.type)", "frame.filename if 'polymath' in fname: continue # Stop tracing at", "If `context` is not a mapping. \"\"\" if context is", "var = np.expand_dims(var, axis=i) else: var = np.squeeze(var) if len(var.shape)", "pragma: no cover messages = [] interactive = False if", "= [self.instantiate_node(node) for node in fetches] context = self.instantiate_graph(context, **kwargs)", "set_name(self, name): \"\"\" Set the name for a variable index,", "help_ = nodeop(builtins.help) min_ = nodeop(builtins.min) setattr_ = nodeop(builtins.setattr) all_", "return g.nodes[name] raise RuntimeError(f\"Cannot find {name} in graph nodes. 
Graph:", "= nodeop(builtins.set) add = nodeop(operator.add) and_ = nodeop(operator.and_) attrgetter =", "str)): var = np.asarray([var]) elif not isinstance(var, (np.ndarray, list)): raise", "mapping. \"\"\" if context is None: context = {} elif", "other, self, graph=self.graph) def __xor__(self, other): return slice_op(operator.xor, self, other,", "= nodeop(builtins.next) slice_ = nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_ =", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__ne__(self) def __gt__(self,", "# pylint:disable=W0621 value = context.pop(node) node = self.instantiate_node(node) if node", "if isinstance(e, Node): e.add_successor(self) @property def domain(self): return Domain(tuple([])) @property", "the decorator is used without arguments @functools.wraps(target) def _wrapper(*args, **kwargs_inner):", "= tuple([shape]) elif isinstance(shape, Node): self._shape = tuple([shape]) elif not", "fname: continue # Stop tracing at the last interactive cell", "cell if interactive and not fname.startswith('<'): break # pragma: no", "not fname.startswith('<'): break # pragma: no cover interactive = fname.startswith('<')", "self.args return var def set_name(self, name): \"\"\" Set the name", "var = np.squeeze(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) !=", "{self.args}\") if self.graph: graph = self.graph if self._name is not", "self.value = value self.dependencies = [] self._args = [] self._predeecessors", "(\"slice_op\", \"var_index\", \"index\")) else other.__ror__(self) def __ror__(self, other): return or_(other,", "\"\"\" # TODO: Need a way to check if the", "`args` and keyword arguments `kwargs`. Parameters ---------- func : callable", "stack_size = 5 evaluated_nodes = 0 def __init__(self, *args, name=None,", "---------- node : Node or str Node instance or name", "= self.args[1].domain op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])", "name : str or None Name of the node or", "nodes by retrieving the node object associated with the node", "whose keys are node instances or names. kwargs : dict[str,", "to evaluate the nodes. callback : callable or None Callback", "= nodeop(builtins.issubclass) pow_ = nodeop(builtins.pow) super_ = nodeop(builtins.super) bytes_ =", "if isinstance(key, Node): name.append(key.name) elif hasattr(key, \"__len__\") and not isinstance(key,", "else other.__le__(self) def __invert__(self): return inv(self, graph=self.graph) def __neg__(self): return", "graph=self.graph) else: return var_index(self, [key], name=name, graph=self.graph) def set_shape(self, shape=None,", "tuple(new_args) @shape.setter def shape(self, shape): self.set_shape(shape, init=True) @graph.setter def graph(self,", "self.set_shape(shape, init=True) @graph.setter def graph(self, graph): self._graph = Node.get_active_graph(graph) @property", "If a node has a default value to use for", "g.nodes[name] raise RuntimeError(f\"Cannot find {name} in graph nodes. 
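# Example (sketch): indexing a node does not read memory; it builds a
# `var_index` over the variable and the index expression. `w` stands for a
# hypothetical multi-dimensional variable node.
#
#     elem = w[1, 2]       # var_index over w with index (1, 2)
#     tile = w[[0, 1]]     # list keys route through var_index as well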
class slice_op(Node):
    """
    Node representing multi-dimensional operations performed on a node.

    Parameters
    ----------
    target : callable
        The operation applied elementwise over the combined domains.
    """
    def __init__(self, target, *args, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        elif len(args) == 2:
            all_args = _flatten_iterable(args)
            slice1_var, slice1_idx, slice2_var, slice2_idx = \
                self.get_index_nodes(all_args[0], all_args[1])
            domain = slice1_idx.combine_set_domains(slice2_idx)
        if "op_name" in kwargs:
            kwargs.pop("op_name")
        target_name = f"{target.__module__}.{target.__name__}"
        super(slice_op, self).__init__(*args, target=target_name, domain=domain,
                                       op_name=f"slice_{target.__name__}", **kwargs)
        self.target = target
        self.added_attrs += ["domain", "target"]

    @property
    def domain(self):
        return self.kwargs["domain"]

    @property
    def target(self):
        return self._target

    @target.setter
    def target(self, fnc):
        self._target = fnc

    def set_shape(self, shape=None, init=False):
        s = []
        assert isinstance(shape, (tuple, list))
        if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) > 0:
            self._shape = shape if isinstance(shape, tuple) else tuple(shape)
        else:
            for idx, d in enumerate(self.domain.dom_set):
                if shape and isinstance(shape[idx], (func_op, Integral)):
                    s.append(shape[idx])
                elif shape and isinstance(shape[idx], float):
                    s.append(int(shape[idx]))
                elif isinstance(d, float):
                    s.append(int(d))
                elif isinstance(d, var_index):
                    s.append(d.domain)
                else:
                    s.append(d)
            self._shape = tuple(s)

    def is_scalar(self, val):
        return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)

    def _evaluate(self, op1, op2, context=None, **kwargs):
        if self.is_scalar(op1) or self.is_scalar(op2):
            value = self.target(op1, op2)
        else:
            arg0_dom = self.args[0].domain
            arg1_dom = self.args[1].domain
            op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])
            op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])
            op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)
            op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)
            value = self.target(op1, op2)
        return value

    def get_index_nodes(self, slice1_var=None, slice2_var=None):
        if slice1_var is None and slice2_var is None:
            slice1_var, slice2_var = self.args
        if isinstance(slice1_var, (slice_op, var_index)) \
                or _is_node_type_instance(slice1_var, "GroupNode"):
            slice1_idx = slice1_var.domain
        elif _is_node_type_instance(slice1_var, "index"):
            slice1_idx = slice1_var.domain
        else:
            slice1_idx = Domain(tuple([]))
        if isinstance(slice2_var, (slice_op, var_index)) \
                or _is_node_type_instance(slice2_var, "GroupNode"):
            slice2_idx = slice2_var.domain
        elif _is_node_type_instance(slice2_var, "index"):
            slice2_idx = slice2_var.domain
        else:
            slice2_idx = Domain(tuple([]))
        return slice1_var, slice1_idx, slice2_var, slice2_idx

    def __add__(self, other):
        return slice_op(operator.add, self, other, graph=self.graph)

    def __radd__(self, other):
        return slice_op(operator.add, other, self, graph=self.graph)

    def __mul__(self, other):
        return slice_op(operator.mul, self, other, graph=self.graph)

    # The remaining binary dunders (sub, truediv, floordiv, mod, pow, matmul,
    # lshift, rshift, and, or, xor, lt, le, gt, ge, ne) follow the same
    # slice_op(operator.<fn>, ...) pattern.

    def __repr__(self):
        return "<slice_%s '%s'>" % (self.target.__name__, self.name)


class func_op(Node):  # pylint: disable=C0103,R0903
    """
    Node wrapper for stateless functions.

    Parameters
    ----------
    target : callable
        function to evaluate the node
    args : tuple
        positional arguments passed to the target
    kwargs : dict
        keyword arguments passed to the target
    """
    def __init__(self, target, *args, **kwargs):
        kwargs["op_name"] = kwargs["op_name"] if "op_name" in kwargs \
            else f"{target.__name__}"
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        elif len(args) == 2:
            all_args = _flatten_iterable(args)
            slice1_var, slice1_idx, slice2_var, slice2_idx = \
                self.get_index_nodes(all_args[0], all_args[1])
            domain = slice1_idx.combine_set_domains(slice2_idx)
        else:
            domain = Domain(tuple([]))
        self._target = None
        super(func_op, self).__init__(*args, target=f"{target.__module__}.{target.__name__}",
                                      domain=domain, **kwargs)
        self.target = target
        self.added_attrs += ["domain", "target"]

    @property
    def target(self):
        return self._target

    @target.setter
    def target(self, fnc):
        self._target = fnc
        self.op_name = f"{fnc.__name__}"
        self.kwargs["target"] = f"{fnc.__module__}.{fnc.__name__}"

    def __getitem__(self, key):
        return self

    @property
    def domain(self):
        return self.kwargs["domain"]

    # func_op combines argument domains the same way slice_op does
    get_index_nodes = slice_op.get_index_nodes

    def _evaluate(self, *args, **kwargs):
        for aa in self.added_attrs:
            kwargs.pop(aa)
        return self.target(*args, **kwargs)

    def __call__(self, *args, **kwargs):
        return call(self, *args, **kwargs)

    def __repr__(self):
        return "<func_op '%s' target=%s args=<%d items>>" % \
            (self.name, self.kwargs["target"], len(self.args))
This can be a tuple of integers", "self.dependencies = [] if dependencies is None else dependencies if", "dict keywoard arguments passed to the target \"\"\" def __init__(self,", "np.asarray(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape): if", "__init__(self, target, *args, **kwargs): if \"domain\" in kwargs: domain =", "nodeop(builtins.slice) any_ = nodeop(builtins.any) divmod_ = nodeop(builtins.divmod) id_ = nodeop(builtins.id)", "not in self.added_attrs} # Evaluate the node callback = callback", "\"\"\" _graph_stack = deque([None]) _eval_stack = [] stack_size = 5", "enable \"is shape finalized\" to work self._shape = UNSET_SHAPE else:", "already exists in the associated graph. KeyError If the current", "Raises ------ ValueError If an node with `value` already exists", "self.kwargs = kwargs self.graph = graph self._shape = OrderedDict() self.shape", "the node. \"\"\" return self(*args, context, **kwargs) @classmethod def evaluate_node(cls,", "name for a variable index, making sure to replicate the", "not all([(idx_val - 1) >= indices[-1][idx] for idx, idx_val in", "graph=self.graph) def __and__(self, other): return slice_op(operator.and_, self, other, graph=self.graph) def", "return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0]", "raise ValueError(f\"var_index {self.name} has indices which are greater than the", "else: context[self] = self.value = self._evaluate(*args, **kwargs) return self.value def", "name.append(k.name) else: name.append(str(k)) else: name.append(key) name = self.var.name + \"[\"", "= context.pop(node) node = self.instantiate_node(node) if node in context: raise", "variable name. Returns ------- values : Node or tuple[object] Output", "in the associated graph. \"\"\" # TODO: Need a way", "return self elif self.is_shape_finalized() and len(self.nodes) > 0: if isinstance(key,", "# pylint: disable=W0235 if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\"))", "not isinstance(key, str): for k in key: if isinstance(k, Node):", "---------- graph : Node or None Graph to return or", "is `None`, this is the top-level graph. 
\"\"\" return self._graph", "------- value : object Output of the node given the", "nodeop(builtins.property) type_ = nodeop(builtins.type) chr_ = nodeop(builtins.chr) frozenset_ = nodeop(builtins.frozenset)", "to the target \"\"\" def __init__(self, target, *args, **kwargs): kwargs[\"op_name\"]", "sub = nodeop(operator.sub) truediv = nodeop(operator.truediv) truth = nodeop(operator.truth) xor", "sub(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "slice_op(operator.truediv, other, self, graph=self.graph) def __floordiv__(self, other): return slice_op(operator.floordiv, self,", "_evaluate(self, op1, op2, context=None, **kwargs): if self.is_scalar(op1) or self.is_scalar(op2): value", "__gt__(self, other): return slice_op(operator.gt, self, other, graph=self.graph) def __ge__(self, other):", "self).__init__(var, idx, name=name, domain=domain, **kwargs) @property def domain(self): return self.kwargs[\"domain\"]", "in kwargs: kwargs.pop(\"name\") self.added_attrs = [] # TODO: CHange this", "in {self.graph.name}:\\n\\t\" f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\") if self.graph: graph =", "isinstance_ = nodeop(builtins.isinstance) ord_ = nodeop(builtins.ord) sum_ = nodeop(builtins.sum) bytearray_", "isinstance(context, Mapping): raise ValueError(\"`context` must be a mapping.\") nodes =", "indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x,", "def __repr__(self): return \"<slice_%s '%s'>\" % (self.target.__name__, self.name) class func_op(Node):", "shape(self): \"\"\" tuple : Shape of the output for a", "tuple(s) def is_scalar(self, val): return not isinstance(val, np.ndarray) or (len(val.shape)", "self.graph while g is not None and name not in", "the variable shape:\\n\" f\"\\tArgs: {self.args}\\n\" f\"\\tVar shape: {var.shape}\\n\" f\"\\tNode shape:", "= self.target(op1, op2) else: arg0_dom = self.args[0].domain arg1_dom = self.args[1].domain", "other.__xor__(self) def __lt__(self, other): return lt(self, other, graph=self.graph) if not", "other): return slice_op(operator.truediv, self, other, graph=self.graph) def __rtruediv__(self, other): return", "if len(out_shape) > len(var.shape): for i in range(len(out_shape)): if out_shape[i]", "scope. \"\"\" # Add dependencies to the graph graph =", "node.evaluate(context) return context def run(self, fetches, context=None, *, callback=None, **kwargs):", "------- values : Node or tuple[object] Output of the nodes", "\"\"\" Set the name of the node and update the", "the values. Parameters ---------- context : dict Normalised context in", "more nodes given a dictionary of node names with their", "== hash(other) def __getattr__(self, name): return getattr_(self, name, graph=self.graph) def", "\"\"\"str : Unique name of the node\"\"\" return self._name @property", "@args.setter def args(self, args): new_args = [] for arg in", "given a context. Parameters ---------- context : dict Normalised context", "nodeop(builtins.delattr) hash_ = nodeop(builtins.hash) memoryview_ = nodeop(builtins.memoryview) set_ = nodeop(builtins.set)", "kwargs.pop(\"domain\") elif len(args) == 2: all_args = _flatten_iterable(args) slice1_var, slice1_idx,", "% \\ (self.name, self.kwargs[\"target\"], len(self.args)) def nodeop(target=None, **kwargs): \"\"\" Decorator", "op_name=None, value=None, **kwargs): if len(args) == 0: n = cls(name=name,", "indexing into. 
idx : tuple Tuple of either integer values", "concat = nodeop(operator.concat) contains = nodeop(operator.contains) countOf = nodeop(operator.countOf) delitem", "call(func, *args, **kwargs): \"\"\" Call `func` with positional arguments `args`", "eval_ = nodeop(builtins.eval) int_ = nodeop(builtins.int) open_ = nodeop(builtins.open) str_", "not_ = nodeop(operator.not_) or_ = nodeop(operator.or_) pos = nodeop(operator.pos) rshift", "ne(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "float): s.append(int(d)) elif isinstance(d, var_index): s.append(d.domain) else: s.append(d) self._shape =", "if not single and not all([(idx_val - 1) >= indices[-1][idx]", "if out_shape == (1,) and len(indices) == 1: res =", "self, other, graph=self.graph) def __ror__(self, other): return slice_op(operator.or_, other, self,", "used for executing this node. \"\"\" return tuple(self._args) @property def", "ValueError(f\"duplicate unequal value for node '{node}'\") context[node] = value if", "instance or node name\") def instantiate_graph(self, context, **kwargs): \"\"\" Instantiate", "variable, index combination. Parameters ---------- value : str Unique name", "= fnc self.op_name = f\"{fnc.__name__}\" self.kwargs[\"target\"] = f\"{fnc.__module__}.{fnc.__name__}\" def __getitem__(self,", "nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul = nodeop(operator.mul)", "unequal value for node '{node}'\") context[node] = value if node.op_name", "Node) else a for a in self.args] @property def shape(self):", "the currently active graph instance by returning the explicitly given", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__le__(self) def", "making sure to replicate the new name with a unique", "= slice1_idx.combine_set_domains(slice2_idx) if \"op_name\" in kwargs: kwargs.pop(\"op_name\") target_name = f\"{target.__module__}.{target.__name__}\"", "other.__rpow__(self) def __rpow__(self, other): return pow_(other, self, graph=self.graph) if not", "to the `_evaluate` method. name : str or None Name", "nodeop(operator.and_) attrgetter = nodeop(operator.attrgetter) concat = nodeop(operator.concat) contains = nodeop(operator.contains)", "nodeop(operator.is_not) itemgetter = nodeop(operator.itemgetter) le = nodeop(operator.le) length_hint = nodeop(operator.length_hint)", "from .domain import Domain from .util import _noop_callback, _flatten_iterable, node_hash,", "target @property def domain(self): return self.kwargs[\"domain\"] def __getitem__(self, key): if", "to check for finalzied shape if self.shape == UNSET_SHAPE: raise", "values of a variable corresponding to input index values. Parameters", "c.name.split(\"/\")]) if c.write_count > 0 else c.name fetches[fetches.index(c)] = c.graph.nodes[write_name]", "thereof. 
\"\"\" if isinstance(fetches, (str, Node)): fetches = [fetches] single", "graph=self.graph) def __mod__(self, other): return slice_op(operator.mod, self, other, graph=self.graph) def", "fetches] return values[0] if single else tuple(values) def __getstate__(self): return", "nodeop(builtins.eval) int_ = nodeop(builtins.int) open_ = nodeop(builtins.open) str_ = nodeop(builtins.str)", "nodeop(operator.mod) mul = nodeop(operator.mul) ne = nodeop(operator.ne) neg = nodeop(operator.neg)", "self._name in graph.nodes: graph.update_graph_key(self._name, name) else: graph.nodes[name] = self self._name", "Decorator for creating nodes from functions. \"\"\" # This is", "Add dependencies to the graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield", "import builtins import operator from collections import OrderedDict, Mapping, Sequence,", "---------- args : tuple Positional arguments passed to the `_evaluate`", "\"\"\" return node_hash(self) def find_node(self, name): g = self.graph while", "\"GroupNode\"): slice1_idx = slice1_var.domain elif _is_node_type_instance(slice1_var, \"index\"): slice1_idx = slice1_var.domain", "return or_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "= nodeop(builtins.all) dir_ = nodeop(builtins.dir) hex_ = nodeop(builtins.hex) next_ =", "op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([]) op1 =", "a tuple of integers or parameter node names. \"\"\" return", ": Any or None If a node has a default", "ret def is_scalar(self, val=None): if val is not None and", "func : callable Function to call when the node is", "= self.instantiate_graph(context, **kwargs) for c in context: if c in", "def __getitem__(self, key): if isinstance(key, (tuple, list, np.ndarray)) and len(key)", "self.name else: self._op_name = self.__class__.__name__ @name.setter def name(self, name): self.set_name(name)", "_evaluate(self, *args, context=None, **kwargs): \"\"\" Inheriting nodes should implement this", "else: self._op_name = self.__class__.__name__ @name.setter def name(self, name): self.set_name(name) @args.setter", "name = name or uuid.uuid4().hex # TODO: Need a way", "i in c.name.split(\"/\")]) if c.write_count > 0 else c.name fetches[fetches.index(c)]", "and not isinstance(self.var.shape[0],Node)): raise ValueError(f\"Invalid shape var for var index", "var = np.asarray(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) ==", ".graph import Graph from .domain import Domain from .util import", "{out_shape}.\\n\" f\"Domain: {self.domain}\\n\" f\"Eval Stack: {Node._eval_stack}\") if not single and", "self in context: return context[self] # Evaluate the parents partial", "attr)) for attr in ['start', 'stop', 'step']]) return node except", "for var index {self} with variable shape {self.var.shape}\") return True", "graph=self.graph) def __rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph) def", "nodes: # pylint:disable=W0621 value = context.pop(node) node = self.instantiate_node(node) if", "**kwargs): \"\"\" Inheriting nodes should implement this function to evaluate", "== 1 and not isinstance(self.var.shape[0],Node)): raise ValueError(f\"Invalid shape var for", "nodeop(builtins.setattr) all_ = nodeop(builtins.all) dir_ = nodeop(builtins.dir) hex_ = nodeop(builtins.hex)", "method. \"\"\" _graph_stack = deque([None]) _eval_stack = [] stack_size =", "for any node. 
class EvaluationError(RuntimeError):
    """ Failed to evaluate a node. """


class var_index(Node):  # pylint: disable=C0103,W0223
    """
    Node representing values of a variable corresponding to input index values.

    Parameters
    ----------
    var : Node
        The multi-dimensional variable used for indexing into.
    idx : tuple
        Tuple of either integer values or index nodes.
    """
    def __init__(self, var, idx, name=None, **kwargs):  # pylint: disable=W0235
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) \
                else kwargs.pop("domain")
        else:
            domain = Domain(idx)
        super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs)

    @property
    def domain(self):
        return self.kwargs["domain"]

    @property
    def var(self):
        return self.args[0]

    def set_name(self, name):
        """
        Set the name for a variable index, making sure to replicate the new name with
        a unique string which corresponds to the variable, index combination.
        """
        if self.graph and name in self.graph.nodes:
            raise ValueError(f"duplicate name '{name}' in {self.graph.name}:"
                             f"Existing: {self.graph.nodes[name].args}\n"
                             f"New: {self.args}")
        if self.graph:
            graph = self.graph
            if self._name and self._name in graph.nodes:
                graph.update_graph_key(self._name, name)
            else:
                graph.nodes[name] = self
        self._name = name
        return self

    def __getitem__(self, key):
        if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
            return self
        elif self.is_shape_finalized() and len(self.nodes) > 0:
            if isinstance(key, (int, Node)):
                key = tuple([key])
            if len(key) != len(self.shape):
                raise KeyError(f"Invalid key shape for {self.name}:\n"
                               f"Shape: {self.shape}\n"
                               f"Key: {key}")
            # Flat index into the ordered subnodes
            idx = int(np.ravel_multi_index(key, self.shape))
            ret = self.nodes.item_by_index(idx)
            return ret
        else:
            if isinstance(key, list):
                ret = var_index(self.var, key, graph=self)
            elif isinstance(key, tuple):
                ret = var_index(self.var, list(key), graph=self)
            else:
                ret = var_index(self.var, tuple([key]), graph=self)
            return ret

    def is_scalar(self, val=None):
        if val is not None and (not isinstance(val, np.ndarray) or
                                (len(val.shape) == 1 and val.shape[0] == 1)):
            if self.var.shape != DEFAULT_SHAPES[0] and \
                    (len(self.var.shape) == 1 and not isinstance(self.var.shape[0], Node)):
                raise ValueError(f"Invalid shape var for var index {self} "
                                 f"with variable shape {self.var.shape}")
            return True
        return self.var.shape == DEFAULT_SHAPES[0]

    def _evaluate(self, var, indices, **kwargs):
        if self.is_scalar(var):
            out_shape = (1,)
            indices = (0,)
            single = True
        else:
            out_shape = self.domain.shape_from_indices(indices)
            indices = self.domain.compute_pairs()
            single = False
        if isinstance(var, (Integral, Real, str)):
            var = np.asarray([var])
        elif not isinstance(var, (np.ndarray, list)):
            raise TypeError(f"Variable {var} with type {type(var)} is not a list or "
                            f"numpy array, and cannot be sliced for {self.name}")
        elif isinstance(var, list):
            var = np.asarray(var)

        if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
            if len(out_shape) > len(var.shape):
                for i in range(len(out_shape)):
                    if out_shape[i] == 1:
                        var = np.expand_dims(var, axis=i)
            else:
                var = np.squeeze(var)
        if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
            raise ValueError(f"Index list does not match {var.shape} in {self.var.name} - "
                             f"{self.var.op_name} dimensions for slice {self.args[0].name} "
                             f"with {out_shape}.\n"
                             f"Domain: {self.domain}\n"
                             f"Eval Stack: {Node._eval_stack}")
        if not single and not all([(idx_val - 1) >= indices[-1][idx]
                                   for idx, idx_val in enumerate(var.shape)]):
            raise ValueError(f"var_index {self.name} has indices which are greater than "
                             f"the variable shape:\n"
                             f"\tArgs: {self.args}\n"
                             f"\tVar shape: {var.shape}\n"
                             f"\tNode shape: {self.var.shape}\n"
                             f"\tIndex Upper bounds: {indices[-1]}")

        indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices))
        res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
        if out_shape == (1,) and len(indices) == 1:
            res = res[0]
        self.domain.set_computed(out_shape, indices)
        return res

    def __add__(self, other): return slice_op(operator.add, self, other, graph=self.graph)
    def __radd__(self, other): return slice_op(operator.add, other, self, graph=self.graph)
    def __sub__(self, other): return slice_op(operator.sub, self, other, graph=self.graph)
    def __rsub__(self, other): return slice_op(operator.sub, other, self, graph=self.graph)
    def __pow__(self, other): return slice_op(builtins.pow, self, other, graph=self.graph)
    def __rpow__(self, other): return slice_op(builtins.pow, other, self, graph=self.graph)
    def __mul__(self, other): return slice_op(operator.mul, self, other, graph=self.graph)
    def __rmul__(self, other): return slice_op(operator.mul, other, self, graph=self.graph)
    def __truediv__(self, other): return slice_op(operator.truediv, self, other, graph=self.graph)
    def __rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph)
    def __floordiv__(self, other): return slice_op(operator.floordiv, self, other, graph=self.graph)
    def __rfloordiv__(self, other): return slice_op(operator.floordiv, other, self, graph=self.graph)
    def __mod__(self, other): return slice_op(operator.mod, self, other, graph=self.graph)
    def __rmod__(self, other): return slice_op(operator.mod, other, self, graph=self.graph)
    def __lshift__(self, other): return slice_op(operator.lshift, self, other, graph=self.graph)
    def __rlshift__(self, other): return slice_op(operator.lshift, other, self, graph=self.graph)
    def __rshift__(self, other): return slice_op(operator.rshift, self, other, graph=self.graph)
    def __rrshift__(self, other): return slice_op(operator.rshift, other, self, graph=self.graph)
    def __and__(self, other): return slice_op(operator.and_, self, other, graph=self.graph)
    def __rand__(self, other): return slice_op(operator.and_, other, self, graph=self.graph)
    def __or__(self, other): return slice_op(operator.or_, self, other, graph=self.graph)
    def __ror__(self, other): return slice_op(operator.or_, other, self, graph=self.graph)
    def __xor__(self, other): return slice_op(operator.xor, self, other, graph=self.graph)
    def __rxor__(self, other): return slice_op(operator.xor, other, self, graph=self.graph)
    def __lt__(self, other): return slice_op(operator.lt, self, other, graph=self.graph)
    def __le__(self, other): return slice_op(operator.le, self, other, graph=self.graph)
    def __ne__(self, other): return slice_op(operator.ne, self, other, graph=self.graph)
    def __gt__(self, other): return slice_op(operator.gt, self, other, graph=self.graph)
    def __ge__(self, other): return slice_op(operator.ge, self, other, graph=self.graph)

    def __repr__(self):
        return "<var_index name=%s, index=%s>" % (self.name, self.args)
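# A plain-numpy sketch of the gather that `var_index._evaluate` performs in the
# non-scalar case: `indices` plays the role of `domain.compute_pairs()` and
# `out_shape` the role of `domain.shape_from_indices(...)`. Runnable on its own;
# `_example_var_index_gather` is a hypothetical helper name.
def _example_var_index_gather():
    var = np.arange(6).reshape(2, 3)
    indices = [(i, j) for i in range(2) for j in range(3)]
    out_shape = (2, 3)
    res = np.asarray([var[idx] for idx in indices]).reshape(out_shape)
    return res  # identical to `var` for this dense index list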
\"\"\"", "def init_from_args(cls, *args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs):", "import traceback import uuid import numpy as np import importlib", "__rtruediv__(self, other): return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "other, graph=self.graph) def __radd__(self, other): return slice_op(operator.add, other, self, graph=self.graph)", "idx_val in enumerate(var.shape)]): raise ValueError(f\"var_index {self.name} has indices which are", "node = self.nodes[name] if node in context: raise ValueError(f\"duplicate value", "fetches = [self.instantiate_node(node) for node in fetches] context = self.instantiate_graph(context,", "is the top-level graph. op_name : str Operation name which", "other): return slice_op(operator.ne, self, other, graph=self.graph) def __gt__(self, other): return", "DEFAULT_SHAPES[0] def _evaluate(self, var, indices, **kwargs): if self.is_scalar(var): out_shape =", "graph=self.graph) def __gt__(self, other): return slice_op(operator.gt, self, other, graph=self.graph) def", "graph=self.graph) def __mul__(self, other): return mul(self, other, graph=self.graph) if not", "slice1_idx.combine_set_domains(slice2_idx) else: domain = Domain(tuple([])) self._target = None super(func_op, self).__init__(*args,", "nodeop(operator.pos) rshift = nodeop(operator.rshift) setitem = nodeop(operator.setitem) sub = nodeop(operator.sub)", "and (node.graph != self): raise RuntimeError(f\"node '{node}' does not belong", "@property def gname(self): scope_names = [self.name] cgraph = self.graph while", "def op_name(self): \"\"\" str : Operation name which describes the", "__mul__(self, other): return mul(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "other): return mul(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "= 0 def __init__(self, *args, name=None, shape=None, graph=None, dependencies=None, op_name=None,", "other): return slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self): return \"<var_index", "self._name is not None and self._name in graph.nodes: graph.update_graph_key(self._name, name)", "else: name = [] if isinstance(key, Node): name.append(key.name) elif hasattr(key,", "@property def shape(self): \"\"\" tuple : Shape of the output", "= nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ =", "var, idx, name=None, **kwargs): # pylint: disable=W0235 if \"domain\" in", "one or more nodes given a dictionary of node names", "def __matmul__(self, other): return matmul(self, other, graph=self.graph) def __rmatmul__(self, other):", "return slice_op(operator.xor, other, self, graph=self.graph) def __lt__(self, other): return slice_op(operator.lt,", "node except Exception as ex: # pragma: no cover messages", "float_ = nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ = nodeop(builtins.print) tuple_", "lshift(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "Additional context information keyed by variable name. 
Returns ------- normalized_context", "in node) if isinstance(node, list): return [partial(element) for element in", "__lt__(self, other): return slice_op(operator.lt, self, other, graph=self.graph) def __le__(self, other):", "1) def _evaluate(self, op1, op2, context=None, **kwargs): if self.is_scalar(op1) or", "self.dependencies = [] self._args = [] self._predeecessors = [] self._succesors", "def update_graph_key(self, old_key, new_key): n = list(map(lambda k: (new_key, self.nodes[k])", "------ ValueError If the context specifies more than one value", "return node.evaluate(context, **kwargs) partial = functools.partial(cls.evaluate_node, context=context, **kwargs) if isinstance(node,", "nodes. \"\"\" def __init__(self, var, idx, name=None, **kwargs): # pylint:", "else: graph.nodes[name] = self self._name = name return self def", "var_index(self, [key], name=name, graph=self.graph) def set_shape(self, shape=None, init=False): s =", "graph.dependencies.extend(dependencies) yield # Remove dependencies from the graph del graph.dependencies[-len(dependencies):]", "which to evaluate the node. callback : callable or None", "KeyError If the current name of the node cannot be", "(Node, Integral)): shapes.append(dim) elif isinstance(dim, float): shapes.append(int(dim)) else: raise TypeError(f\"Shape", "(\"slice_op\", \"var_index\", \"index\")) else other.__rfloordiv__(self) def __rfloordiv__(self, other): return floordiv(other,", "graph=self.graph) def __pos__(self): return pos(self, graph=self.graph) def __reversed__(self): return reversed_(self,", "self, other, graph=self.graph) def __repr__(self): return \"<slice_%s '%s'>\" % (self.target.__name__,", "isinstance(var, (Integral, Real, str)): var = np.asarray([var]) elif not isinstance(var,", "tuple([key]) if len(key) != len(self.shape): raise KeyError(f\"Invalid key shape for", "else: raise TypeError(f\"Shape value must be placeholder or integer value", "\"index\")) else other.__rpow__(self) def __rpow__(self, other): return pow_(other, self, graph=self.graph)", "arguments passed to the `_evaluate` method. 
\"\"\" _graph_stack = deque([None])", "self._graph = Node.get_active_graph(graph) @property def gname(self): scope_names = [self.name] cgraph", "must be an `Node` instance, node name, or a \"", "other.__lt__(self) def __ge__(self, other): return ge(self, other, graph=self.graph) if not", "other): return slice_op(operator.add, self, other, graph=self.graph) def __radd__(self, other): return", "= Domain(tuple([])) return slice1_var, slice1_idx, slice2_var, slice2_idx def _evaluate(self, *args,", "def __rxor__(self, other): return slice_op(operator.xor, other, self, graph=self.graph) def __lt__(self,", "is used without arguments @functools.wraps(target) def _wrapper(*args, **kwargs_inner): return func_op(target,", "this node self.dependencies = [] if dependencies is None else", "shape(self, shape): self.set_shape(shape, init=True) @graph.setter def graph(self, graph): self._graph =", "does not match {var.shape} in {self.var.name} - {self.var.op_name}\" f\"dimensions for", "def __gt__(self, other): return gt(self, other, graph=self.graph) if not _is_node_type_instance(other,", "= self.nodes[name] return ret else: name = [] if isinstance(key,", "self._target = fnc self.op_name = f\"{fnc.__name__}\" self.kwargs[\"target\"] = f\"{fnc.__module__}.{fnc.__name__}\" def", "not in self.nodes and (node.graph != self): raise RuntimeError(f\"node '{node}'", "__rsub__(self, other): return slice_op(operator.sub, other, self, graph=self.graph) def __pow__(self, other):", "isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain elif", "= nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ = nodeop(builtins.help) min_ =", "graph. Parameters ---------- value : str Unique name of the", "Returns ------- instantiated_node : Node Node instance. Raises ------ ValueError", "= self.target(op1, op2) return value def get_index_nodes(self, slice1_var=None, slice2_var=None): if", "parents partial = functools.partial(self.evaluate_node, context=context, callback=callback) args = [partial(arg) for", "a hash of all attributes and subgraphs of a node.", "self.kwargs[key] = value def is_shape_finalized(self): if self.shape == UNSET_SHAPE: return", "len(key) == 0: return self elif self.is_shape_finalized() and len(self.nodes) >", "float): self._shape = tuple([np.int(shape)]) elif isinstance(shape, Integral): self._shape = tuple([shape])", "self._op_name = self.__class__.__name__ @name.setter def name(self, name): self.set_name(name) @args.setter def", "other): return slice_op(operator.and_, other, self, graph=self.graph) def __or__(self, other): return", "---------- var : Node The multi-dimensional variable used for indexing", "Node): for frame in reversed(n._stack): # pylint: disable=protected-access # Do", "!= \"Node\": if isinstance(key, (slice, Integral)): return getitem(self, key, graph=self.graph)", "tuple(partial(element) for element in node) if isinstance(node, list): return [partial(element)", "graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield # Remove dependencies from the", "tuple of integers or parameter node names. \"\"\" return self._shape", "str Unique name of the node. 
Returns ------- self :", "all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) == np.product(shape)", "functools.partial(nodeop, **kwargs) # This is called when the decorator is", "args: if isinstance(arg, Node): if self.__class__.__name__ == \"Node\": self.nodes[arg.name] =", "partial(value) for key, value in self.kwargs.items() if key not in", "divmod_ = nodeop(builtins.divmod) id_ = nodeop(builtins.id) object_ = nodeop(builtins.object) sorted_", "= functools.partial(self.evaluate_node, context=context, callback=callback) args = [partial(arg) for arg in", "self._preds def add_predecessor(self, pred): if isinstance(pred, Node): self._predecessors.append(pred.gname) else: self._predecessors.append(pred)", "names. \"\"\" return self._shape @property def var(self): return self @property", "all dependencies relevant to this node self.dependencies = [] if", "Node Node instance. Raises ------ ValueError If `node` is not", "keyed by variable name. Returns ------- values : Node or", "return pos(self, graph=self.graph) def __reversed__(self): return reversed_(self, graph=self.graph) def update_graph_key(self,", "index values. Parameters ---------- var : Node The multi-dimensional variable", "other, self, graph=self.graph) def __and__(self, other): return slice_op(operator.and_, self, other,", "graph=self.graph) def __abs__(self): return abs_(self, graph=self.graph) def __pos__(self): return pos(self,", "def __eq__(self, other): return hash(self) == hash(other) def __getattr__(self, name):", "graph=self.graph) def __rshift__(self, other): return slice_op(operator.rshift, self, other, graph=self.graph) def", "nodeop(target=None, **kwargs): \"\"\" Decorator for creating nodes from functions. \"\"\"", "and len(indices) == 1: res = res[0] self.domain.set_computed(out_shape, indices) return", "classmethod_ = nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ = nodeop(builtins.locals) repr_", "be evaluated when an node is evaluated. \"\"\" for node", "context, callback=None): \"\"\" Evaluate the node given a context. Parameters", "\"var_index\", \"index\")) else other.__ge__(self) def __ne__(self, other): return ne(self, other,", "= self.name else: self._op_name = self.__class__.__name__ @name.setter def name(self, name):", "def __ror__(self, other): return slice_op(operator.or_, other, self, graph=self.graph) def __xor__(self,", "Inheriting nodes should implement this function to evaluate the node.", "name): g = self.graph while g is not None and", "Base class for nodes. Parameters ---------- args : tuple Positional", "__mod__(self, other): return mod(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "= nodeop(builtins.bytes) float_ = nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ =", "single = True else: out_shape = self.domain.shape_from_indices(indices) indices = self.domain.compute_pairs()", "or `None` to use the default graph. Raises ------ ValueError", "the node is executed. args : list Sequence of positional", "node '{node}'\") context[node] = value if node.op_name in [\"placeholder\", \"state\",", "values. .. note:: This function modifies the context in place.", "slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) if", "the node and add the node to the graph self._name", "or a sequence thereof. 
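# A plain-numpy sketch of what `slice_op._evaluate` does for two non-scalar
# operands: gather each operand through its mapped domain indices (here a dense
# identity mapping standing in for `domain.map_sub_domain(...)`), reshape to the
# computed shape, then apply the target elementwise. `_example_slice_op_add` is
# a hypothetical helper name.
def _example_slice_op_add():
    a = np.arange(4).reshape(2, 2)
    b = np.ones((2, 2), dtype=int)
    idx = [(i, j) for i in range(2) for j in range(2)]
    op1 = np.asarray([a[i] for i in idx]).reshape(2, 2)
    op2 = np.asarray([b[i] for i in idx]).reshape(2, 2)
    return operator.add(op1, op2)  # elementwise a + b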
\"\"\" if isinstance(fetches, (str, Node)): fetches", "\"__len__\") and not isinstance(key, str): for k in key: if", "return var_index(self, key, graph=self) elif isinstance(key, tuple): return var_index(self, list(key),", "**kwargs): for aa in list(kwargs.keys()): if aa in self.added_attrs: kwargs.pop(aa)", "for the node and add the node to the graph", "which describes the node functionality. \"\"\" return self._op_name @op_name.setter def", "write_name = \"/\".join([f\"{i}{c.write_count-1}\" for i in c.name.split(\"/\")]) if c.write_count >", "---------- value : str Unique name of the node. Returns", "self.kwargs.items() if key not in self.added_attrs} # Evaluate the node", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__ge__(self) def __ne__(self, other): return", "elif _is_node_type_instance(slice2_var, \"index\"): slice2_idx = slice2_var.domain else: slice2_idx = Domain(tuple([]))", "else other.__mul__(self) def __truediv__(self, other): return truediv(self, other, graph=self.graph) if", "return False for s in self.shape: if not isinstance(s, Integral):", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__and__(self) def __or__(self, other):", "map_ = nodeop(builtins.map) reversed_ = nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_", "output for a node. This can be a tuple of", "if self.shape == UNSET_SHAPE: raise TypeError(f'`shape` must be specified explicitly", "add(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "Node) else tuple([]) op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape) op2", "\"target\"] @property def target(self): return self._target @target.setter def target(self, fnc):", "return slice_op(operator.and_, self, other, graph=self.graph) def __rand__(self, other): return slice_op(operator.and_,", "args=<%d items>>\" % \\ (self.name, self.kwargs[\"target\"], len(self.args)) def nodeop(target=None, **kwargs):", "node, context, **kwargs): \"\"\" Evaluate an node or constant given", "for aa in list(kwargs.keys()): if aa in self.added_attrs: kwargs.pop(aa) return", "internal stack traces fname = frame.filename if 'polymath' in fname:", "get_active_graph(graph=None): \"\"\" Obtain the currently active graph instance by returning", "'polymath' in fname: continue # Stop tracing at the last", "del graph.dependencies[-len(dependencies):] #pylint: disable=C0103 abs_ = nodeop(builtins.abs) dict_ = nodeop(builtins.dict)", "Add the keyword arguments for node in nodes: # pylint:disable=W0621", "graph=self.graph) def __mul__(self, other): return slice_op(operator.mul, self, other, graph=self.graph) def", "\"temp\"] and not node.is_shape_finalized(): context[node] = node.evaluate(context) for name, value", "= nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr) max_ =", "and np.prod(var.shape) != np.prod(out_shape): raise ValueError(f\"Index list does not match", "the graph self._name = None self.name = name or uuid.uuid4().hex", "return self._graph def preds(self): return self._preds def succs(self): return self._preds", "in c.name.split(\"/\")]) if c.write_count > 0 else c.name fetches[fetches.index(c)] =", "name) else: graph.nodes[name] = self self._name = name return self", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__gt__(self) def", "return add(other, self, 
graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "self.nodes.item_by_index(idx) return ret else: if isinstance(key, (list)): ret = var_index(self.var,", "ret = var_index(self.var, key, graph=self) else: ret = var_index(self.var, tuple([key]),", "def __add__(self, other): return slice_op(operator.add, self, other, graph=self.graph) def __radd__(self,", "{var.shape}\\n\" f\"\\tNode shape: {self.var.shape}\\n\" f\"\\tIndex Upper bounds: {indices[-1]}\") indices =", "node names with their values. .. note:: This function modifies", "`dependencies` are executed before any nodes in this scope. Parameters", "cover interactive = fname.startswith('<') stack.append(frame) stack = \"\".join(traceback.format_list(reversed(stack))) message =", "Normalised context in which to evaluate the node. callback :", "\"\"\" Failed to evaluate an node. \"\"\" class var_index(Node): #", "graph. \"\"\" # TODO: Need a way to check if", "None Name of the node or `None` to use a", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__xor__(self) def __lt__(self, other):", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__or__(self) def __xor__(self, other):", "object Output of the node given the context. \"\"\" #", "{self.graph.name}:\" f\"Existing: {self.graph.nodes[name].args}\\n\" f\"New: {self.args}\") if self.graph: graph = self.graph", "Function to call when the node is executed. args :", "nodeop(builtins.callable) format_ = nodeop(builtins.format) len_ = nodeop(builtins.len) property_ = nodeop(builtins.property)", "dependencies : list Sequence of nodes to be evaluted before", "and len(shape) > 0: self._shape = shape if isinstance(shape, tuple)", "from numbers import Integral, Rational, Real import contextlib import traceback", "elif isinstance(dim, float): shapes.append(int(dim)) else: raise TypeError(f\"Shape value must be", "__abs__(self): return abs_(self, graph=self.graph) def __pos__(self): return pos(self, graph=self.graph) def", "indices)) res = var[indices] if single else np.asarray([var[idx] for idx", "locals_ = nodeop(builtins.locals) repr_ = nodeop(builtins.repr) zip_ = nodeop(builtins.zip) compile_", "stack = \"\".join(traceback.format_list(reversed(stack))) message = \"Failed to evaluate node `%s`", "[] self.args = args if \"name\" in kwargs: kwargs.pop(\"name\") self.added_attrs", "dict Normalised context in which to evaluate the node. callback", "in args: if isinstance(arg, Node): if self.__class__.__name__ == \"Node\": self.nodes[arg.name]", "values. Parameters ---------- context : dict Normalised context in which", "key, graph=self) elif isinstance(key, tuple): return var_index(self, list(key), graph=self) else:", "node and update the graph. Parameters ---------- value : str", "var_index(self.var, tuple([key]), graph=self) return ret def is_scalar(self, val=None): if val", "\"var_index\", \"index\")) else other.__xor__(self) def __lt__(self, other): return lt(self, other,", "and np.prod(var.shape) == np.prod(out_shape): if len(out_shape) > len(var.shape): for i", "of a node. \"\"\" return node_hash(self) def find_node(self, name): g", "used for indexing into. 
idx : tuple Tuple of either", "add_successor(self, succ): if isinstance(succ, Node): self._succesors.append(succ.gname) else: self._succesors.append(succ) def set_edges(self):", "self.nodes = Graph(node_list) def __call__(self, *args, **kwargs): return self.run(*args, **kwargs)", "nodeop(operator.invert) ior = nodeop(operator.ior) ipow = nodeop(operator.ipow) irshift = nodeop(operator.irshift)", "list does not match {var.shape} in {self.var.name} - {self.var.op_name}\" f\"dimensions", "nodeop(builtins.classmethod) getattr_ = nodeop(builtins.getattr) locals_ = nodeop(builtins.locals) repr_ = nodeop(builtins.repr)", "self.added_attrs.append(key) self.kwargs[key] = value def is_shape_finalized(self): if self.shape == UNSET_SHAPE:", "DEFAULT_SHAPES import builtins import operator from collections import OrderedDict, Mapping,", "return hash(self) == hash(other) def __getattr__(self, name): return getattr_(self, name,", "#pylint: disable=C0103 abs_ = nodeop(builtins.abs) dict_ = nodeop(builtins.dict) help_ =", "\"index\")) else other.__rshift__(self) def __and__(self, other): return and_(self, other, graph=self.graph)", "names to evaluate. context : dict or None Context in", "associated with the node name. Parameters ---------- node : Node", "given a context. \"\"\" Node.evaluated_nodes += 1 try: if isinstance(node,", "---------- context : dict Normalised context in which to evaluate", "arguments @functools.wraps(target) def _wrapper(*args, **kwargs_inner): return func_op(target, *args, **kwargs_inner, **kwargs)", "= args if \"name\" in kwargs: kwargs.pop(\"name\") self.added_attrs = []", "_is_node_type_instance(slice2_var, \"index\"): slice2_idx = slice2_var.domain else: slice2_idx = Domain(tuple([])) return", "RuntimeError If `node` is an `Node` instance but does not", "**kwargs) for c in context: if c in fetches and", "self.graph if self._name and self._name in graph.nodes: graph.update_graph_key(self._name, name) else:", "= Domain(tuple([])) return slice1_var, slice1_idx, slice2_var, slice2_idx def __add__(self, other):", "any node. ValueError If `context` is not a mapping. \"\"\"", "isinstance(d, var_index): s.append(d.domain) else: s.append(d) self._shape = tuple(s) def is_scalar(self,", "check for finalzied shape if self.shape == UNSET_SHAPE: raise TypeError(f'`shape`", "nodes {self}') return self.shape[0] def __iter__(self): num = len(self) for", "\"\"\" Evaluate the dependencies of this node and discard the", ": Node Node instance. Raises ------ ValueError If `node` is", "[] interactive = False if isinstance(node, Node) or not is_iterable(node):", "== 1: res = res[0] self.domain.set_computed(out_shape, indices) return res def", "can be obtained. \"\"\" graph = graph or Node._graph_stack[-1] return", "np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)): if", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rsub__(self)", "the name of the node and update the graph. 
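# Usage sketch for `func_op`: wrap a stateless callable and its arguments into a
# node, then evaluate it with an empty context. Assumes the evaluation machinery
# reconstructed above; `_example_func_op` is a hypothetical helper name.
def _example_func_op():
    f = func_op(operator.add, 1, 2, name="add_example")
    return f.evaluate({})  # expected to return 3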
def nodeop(target=None, **kwargs):
    """
    Decorator for creating nodes from functions.
    """
    # This is called when the decorator is used with arguments
    if target is None:
        return functools.partial(nodeop, **kwargs)

    # This is called when the decorator is used without arguments
    @functools.wraps(target)
    def _wrapper(*args, **kwargs_inner):
        return func_op(target, *args, **kwargs_inner, **kwargs)
    return _wrapper


@nodeop
def call(func, *args, **kwargs):
    """
    Call `func` with positional arguments `args` and keyword arguments `kwargs`.

    Parameters
    ----------
    func : callable
        Function to call when the node is executed.
    args : list
        Sequence of positional arguments passed to `func`.
    kwargs : dict
        Mapping of keyword arguments passed to `func`.
    """
    return func(*args, **kwargs)
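# Usage sketch for `@nodeop`: decorating a function makes calls to it build
# `func_op` nodes instead of executing eagerly. `_scaled_sum` and
# `_example_nodeop` are hypothetical names, not part of the library.
@nodeop
def _scaled_sum(x, y, factor=1):
    return factor * (x + y)


def _example_nodeop():
    node = _scaled_sum(2, 3, factor=2)  # builds a func_op; nothing runs yet
    return node.evaluate({})            # expected to return 10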
@contextlib.contextmanager
def control_dependencies(dependencies, graph=None):
    """
    Ensure that all `dependencies` are executed before any nodes in this scope.

    Parameters
    ----------
    dependencies : list
        Sequence of nodes to be evaluated before evaluating any nodes defined in
        this scope.
    """
    # Add dependencies to the graph
    graph = Node.get_active_graph(graph)
    graph.dependencies.extend(dependencies)
    yield
    # Remove dependencies from the graph
    del graph.dependencies[-len(dependencies):]
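# Usage sketch for `control_dependencies` (illustrative only): `g` stands in for
# a graph node and `dep` for a previously created node; both are placeholder
# names. Nodes created inside the scope record `dep` as a dependency, so
# `Node.evaluate_dependencies` evaluates it first.
def _example_control_dependencies(g, dep):
    with control_dependencies([dep], graph=g):
        return func_op(operator.add, 1, 2, graph=g)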
    @property
    def graph(self):
        """
        polymath.srdfg.graph.Graph : Parent graph of this node. If graph is `None`,
        this is the top-level graph.
        """
        return self._graph

    @property
    def name(self):
        """str : Unique name of the node."""
        return self._name

    @name.setter
    def name(self, name):
        self.set_name(name)

    def set_name(self, name):
        """
        Set the name of the node and update the mapping held by the parent graph.

        Parameters
        ----------
        name : str
            Unique name of the node.

        Returns
        -------
        self : Node
            This node.

        Raises
        ------
        ValueError
            If a node with `name` already exists in the associated graph, or if
            the current name of the node cannot be found in the associated graph.
        """
        name = name or uuid.uuid4().hex
        # TODO: Need a way to check that the name is not equal to the current node as well
        if self.graph and name in self.graph.nodes:
            raise ValueError(f"duplicate name '{name}' in {self.graph.name}:"
                             f"Existing: {self.graph.nodes[name].args}\n"
                             f"New: {self.args}")
        if self.graph:
            graph = self.graph
            if self._name is not None and self._name in graph.nodes:
                graph.update_graph_key(self._name, name)
            else:
                graph.nodes[name] = self
        self._name = name
        return self
\"\"\" return", "from polymath import UNSET_SHAPE, DEFAULT_SHAPES import builtins import operator from", "= node.evaluate(context) return context def run(self, fetches, context=None, *, callback=None,", "list[str or Node] or str or Node One or more", "for k in key: if isinstance(k, Node): name.append(k.name) else: name.append(str(k))", "and not node.is_shape_finalized(): context[node] = node.evaluate(context) for name, value in", "= nodeop(operator.irshift) is_ = nodeop(operator.is_) is_not = nodeop(operator.is_not) itemgetter =", "__rmatmul__(self, other): return matmul(other, self, graph=self.graph) def __mul__(self, other): return", "If an node with `value` already exists in the associated", "len(out_shape) > len(var.shape): for i in range(len(out_shape)): if out_shape[i] ==", "we can report where the node was defined self._stack =", "domain(self): return Domain(tuple([])) @property def args(self): \"\"\" tuple : Positional", "implement this function to evaluate the node. \"\"\" return self(*args,", "if self.__class__.__name__ == \"Node\": context[self] = self.value = self._evaluate(*args, context=context,", "= \"\".join(traceback.format_list(reversed(stack))) message = \"Failed to evaluate node `%s` defined", "self, other, graph=self.graph) def __rpow__(self, other): return slice_op(builtins.pow, other, self,", "stack context so we can report where the node was", "callback) if self in context: return context[self] # Evaluate the", "when the decorator is used with arguments if target is", "corresponds to the variable, index combination. Parameters ---------- value :", "node `%s` defined at:\\n\\n%s\" % (n, stack) messages.append(message) raise ex", "> 0: if isinstance(key, (int, Node)): key = tuple([key]) if", "= [self.name] cgraph = self.graph while cgraph: scope_names.append(cgraph.name) cgraph =", "self._op_name = self.name else: self._op_name = self.__class__.__name__ @name.setter def name(self,", "be set using `value`. kwargs : dict Keyword arguments passed", "elif shape and isinstance(shape[idx], float): s.append(int(shape[idx])) elif isinstance(d, float): s.append(int(d))", "Integral, Rational, Real import contextlib import traceback import uuid import", "the current name of the node cannot be found in", "at:\\n\\n%s\" % (n, stack) messages.append(message) raise ex from EvaluationError(\"\".join(messages)) @classmethod", "if not isinstance(s, Integral): return False return True def set_shape(self,", "instance by returning the explicitly given graph or using the", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__mul__(self) def __truediv__(self,", "var def set_name(self, name): \"\"\" Set the name for a", "if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0: return", "name): \"\"\" Set the name for a variable index, making", "dependencies first self.evaluate_dependencies(context, callback) if self in context: return context[self]", "= tuple([key]) if len(key) != len(self.shape): raise KeyError(f\"Invalid key shape", "nodes. callback : callable or None Callback to be evaluated", "in {self.var.name} - {self.var.op_name}\" f\"dimensions for slice {self.args[0].name} with {out_shape}.\\n\"", ": cal The multi-dimensional variable used for indexing into. 
    @shape.setter
    def shape(self, shape):
        self.set_shape(shape, init=True)

    @graph.setter
    def graph(self, graph):
        self._graph = Node.get_active_graph(graph)

    @property
    def gname(self):
        scope_names = [self.name]
        cgraph = self.graph
        while cgraph:
            scope_names.append(cgraph.name)
            cgraph = cgraph.graph
        return "/".join(list(reversed(scope_names)))

    def set_shape(self, shape=None, init=False):
        if isinstance(shape, float):
            self._shape = tuple([int(shape)])
        elif isinstance(shape, Integral):
            self._shape = tuple([shape])
        elif isinstance(shape, Node):
            self._shape = tuple([shape])
        elif not shape or len(shape) == 0:
            # TODO: Change in order to enable unset shapes
            self._shape = UNSET_SHAPE
        else:
            shapes = []
            for dim in shape:
                if isinstance(dim, (Node, Integral)):
                    shapes.append(dim)
                elif isinstance(dim, float):
                    shapes.append(int(dim))
                else:
                    raise TypeError(f"Shape value must be placeholder or integer value for {self.name}\n"
                                    f"\tDim: {dim}"
                                    f"\n\t{self.kwargs} ")
            self._shape = tuple(shapes)

    @staticmethod
    def get_active_graph(graph=None):
        """
        Obtain the currently active graph instance by returning the explicitly
        given graph or using the default graph.

        Parameters
        ----------
        graph : Node or None
            Graph to return or `None` to use the default graph.

        Returns
        -------
        graph : Node
            Currently active graph.

        Raises
        ------
        ValueError
            If no `Graph` instance can be obtained.
        """
        graph = graph or Node._graph_stack[-1]
        return graph
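    # Shape-normalization sketch (illustrative comment only; `n` and `m` are
    # hypothetical nodes). `set_shape` coerces every accepted form to a tuple
    # of integer dims and/or parameter nodes:
    #
    #   n.set_shape(3.0)      # -> (3,)   float dims are coerced to int
    #   n.set_shape(4)        # -> (4,)   scalars are promoted to 1-tuples
    #   n.set_shape((m, 8))   # -> (m, 8) `m` may be a placeholder Node
    #   n.set_shape(())       # -> UNSET_SHAPE until the shape is finalized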
    def instantiate_node(self, node):  # pylint:disable=W0621
        """
        Instantiate nodes by retrieving the node object associated with the node name.

        Parameters
        ----------
        node : Node or str
            Node instance or name of a node.

        Returns
        -------
        instantiated_node : Node
            Node instance.

        Raises
        ------
        ValueError
            If `node` is a `Node` instance but does not belong to this graph.
        """
        if isinstance(node, str):
            return self.nodes[node]
        if isinstance(node, Node):
            if node.name not in self.nodes:
                raise RuntimeError(f"node '{node}' does not belong to {self} graph, instead belongs to"
                                   f" {node.graph}")
            return node
        raise ValueError(f"'{node}' is not a `Node` instance or node name")
\"\"\" graph = graph or Node._graph_stack[-1]", "self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__floordiv__(self)", "other.__and__(self) def __or__(self, other): return or_(self, other, graph=self.graph) if not", "nodeop(operator.neg) not_ = nodeop(operator.not_) or_ = nodeop(operator.or_) pos = nodeop(operator.pos)", "to evaluate node `%s` defined at:\\n\\n%s\" % (n, stack) messages.append(message)", "raise RuntimeError(f\"node '{node}' does not belong to {self} graph, instead", "`node` is an `Node` instance but does not belong to", "__getitem__(self, key): return self @property def domain(self): return self.kwargs[\"domain\"] def", "value to use for execution, it can be set using", "ior = nodeop(operator.ior) ipow = nodeop(operator.ipow) irshift = nodeop(operator.irshift) is_", "evaluate node `%s` defined at:\\n\\n%s\" % (n, stack) messages.append(message) raise", "context[self] # Evaluate the parents partial = functools.partial(self.evaluate_node, context=context, callback=callback)", "**kwargs): \"\"\" Evaluate one or more nodes given a dictionary", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__gt__(self) def __le__(self, other):", "self.target(op1, op2) return value def get_index_nodes(self, slice1_var=None, slice2_var=None): if slice1_var", "def __invert__(self): return inv(self, graph=self.graph) def __neg__(self): return neg(self, graph=self.graph)", "Domain(tuple([])) self._target = None super(func_op, self).__init__(*args, target=f\"{target.__module__}.{target.__name__}\", domain=domain, **kwargs) self.target", "= tuple(new_args) @shape.setter def shape(self, shape): self.set_shape(shape, init=True) @graph.setter def", "self.__dict__.update(data) def set_name(self, name): \"\"\" Set the name of the", "in this scope. \"\"\" # Add dependencies to the graph", "!= np.prod(out_shape): raise ValueError(f\"Index list does not match {var.shape} in", "isinstance(shape, tuple) else tuple(shape) else: for idx, d in enumerate(self.domain.dom_set):", "\"var_index\", \"index\")) else other.__floordiv__(self) def __mod__(self, other): return mod(self, other,", "and_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "other, self, graph=self.graph) def __lshift__(self, other): return slice_op(operator.lshift, self, other,", "__floordiv__(self, other): return slice_op(operator.floordiv, self, other, graph=self.graph) def __rfloordiv__(self, other):", "__pos__(self): return pos(self, graph=self.graph) def __reversed__(self): return reversed_(self, graph=self.graph) def", "other): return and_(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "\"\"\" def __init__(self, target, *args, **kwargs): kwargs[\"op_name\"] = kwargs[\"op_name\"] if", "bounds: {indices[-1]}\") indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray)", "not a mapping. \"\"\" if context is None: context =", "the node cannot be found in the associated graph. \"\"\"", "nodes = list(context) # Add the keyword arguments for node", "if op_name: self._op_name = op_name elif self.__class__.__name__ == \"Node\": self._op_name", "avoid the context being modified. 
    def run(self, fetches, context=None, *, callback=None, **kwargs):
        """
        Evaluate one or more nodes given a dictionary of node names with their values.

        .. note::
            This function modifies the context in place. Use
            :code:`context=context.copy()` to avoid the context being modified.

        Parameters
        ----------
        fetches : list[str or Node] or str or Node
            One or more `Node` instances or names to evaluate.
        context : dict or None
            Context in which to evaluate the nodes.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        kwargs : dict
            Additional context information keyed by variable name.

        Returns
        -------
        values : Node or tuple[object]
            Output of the nodes given the context.

        Raises
        ------
        ValueError
            If `fetches` is not a `Node` instance, node name, or a sequence thereof.
        """
        if isinstance(fetches, (str, Node)):
            fetches = [fetches]
            single = True
        elif isinstance(fetches, Sequence):
            single = False
        else:
            raise ValueError("`fetches` must be a `Node` instance, node name, or a sequence thereof.")
        fetches = [self.instantiate_node(fetch) for fetch in fetches]
        context = self.instantiate_graph(context, **kwargs)
        for c in context:
            if c in fetches and c.op_name in ["output", "state", "temp"]:
                write_name = "/".join([f"{i}{c.write_count-1}" for i in c.name.split("/")]) \
                    if c.write_count > 0 else c.name
                fetches[fetches.index(c)] = c.graph.nodes[write_name]
        values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches]
        return values[0] if single else tuple(values)

    def __getstate__(self):
        return self.__dict__

    def __setstate__(self, data):
        self.__dict__.update(data)
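    # Usage sketch for `run` (illustrative comment; `x` and `y` name
    # hypothetical input and output nodes of this graph):
    #
    #   value = graph.run("y", {"x": 1.0})       # single fetch -> single value
    #   values = graph.run(["y"], {"x": 1.0})    # sequence fetch -> tuple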
\"\"\" return node_hash(self) def", "Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield # Remove dependencies from the graph del", "else other.__rshift__(self) def __and__(self, other): return and_(self, other, graph=self.graph) if", "self._shape = tuple([shape]) elif not shape or len(shape) == 0:", "value : object Output of the node given the context.", "name = f\"{self.name}{key}\" if name not in self.nodes.keys(): raise KeyError(f\"{name}", "= self.args[0].domain arg1_dom = self.args[1].domain op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0],", "Sequence of positional arguments passed to `func`. kwargs : dict", "---------- target : cal The multi-dimensional variable used for indexing", "domain = Domain(tuple([])) self._target = None super(func_op, self).__init__(*args, target=f\"{target.__module__}.{target.__name__}\", domain=domain,", "not shape or len(shape) == 0: # TODO: Change in", "a context. \"\"\" Node.evaluated_nodes += 1 try: if isinstance(node, Node):", "a particular node. The default hash returns an object id,", "not equal to the current ndoe as ewll if self.graph", "domain=domain, **kwargs) self.target = target self.added_attrs += [\"domain\", \"target\"] @property", "= nodeop(operator.setitem) sub = nodeop(operator.sub) truediv = nodeop(operator.truediv) truth =", "(self.name, self.args) class slice_op(Node): \"\"\" Node representing multi-dimensional operations performed", "self.dependencies: node.evaluate(context, callback) def evaluate(self, context, callback=None): \"\"\" Evaluate the", "be evaluated when an node is evaluated. Returns ------- value", "Parent graph of this node. If graph is `None`, this", "slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx)", "args : list Sequence of positional arguments passed to `func`.", "\"\"\" name = name or uuid.uuid4().hex # TODO: Need a", "return {partial(key): partial(value) for key, value in node.items()} if isinstance(node,", "__setstate__(self, data): self.__dict__.update(data) def set_name(self, name): \"\"\" Set the name", "other, graph=self.graph) def __rrshift__(self, other): return slice_op(operator.rshift, other, self, graph=self.graph)", "= [fetches] single = True elif isinstance(fetches, Sequence): single =", "_is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx = slice2_var.domain else: slice2_idx = Domain(tuple([])) return", "# This is called when the decorator is used with", "explicitly given graph or using the default graph. Parameters ----------", "nodeop(builtins.reversed) complex_ = nodeop(builtins.complex) hasattr_ = nodeop(builtins.hasattr) max_ = nodeop(builtins.max)", "= nodeop(builtins.iter) print_ = nodeop(builtins.print) tuple_ = nodeop(builtins.tuple) callable_ =", "attr in ['start', 'stop', 'step']]) return node except Exception as", "__ne__(self, other): return ne(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "is not None and (not isinstance(val, np.ndarray) or (len(val.shape) ==", "and val.shape[0] == 1)): if self.var.shape != DEFAULT_SHAPES[0] and (len(self.var.shape)", "node is evaluated. Returns ------- value : object Output of", "s = [] assert isinstance(shape, (tuple, list)) if all([isinstance(sv, Integral)", "context. 
    def evaluate_dependencies(self, context, callback=None):
        """
        Evaluate the dependencies of this node and discard the values.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.
        """
        for node in self.dependencies:
            node.evaluate(context, callback)

    def evaluate(self, context, callback=None):
        """
        Evaluate the node given a context.

        Parameters
        ----------
        context : dict
            Normalised context in which to evaluate the node.
        callback : callable or None
            Callback to be evaluated when a node is evaluated.

        Returns
        -------
        value : object
            Output of the node given the context.
        """
        # Evaluate all explicit dependencies first
        self.evaluate_dependencies(context, callback)

        if self in context:
            return context[self]
        # Evaluate the parents
        partial = functools.partial(self.evaluate_node, context=context, callback=callback)
        args = [partial(arg) for arg in self.args]
        kwargs = {key: partial(value) for key, value in self.kwargs.items()
                  if key not in self.added_attrs}
        # Evaluate the node
        callback = callback or _noop_callback
        with callback(self, context):
            if self.__class__.__name__ == "Node":
                context[self] = self.value = self._evaluate(*args, context=context, **kwargs)
            else:
                context[self] = self.value = self._evaluate(*args, **kwargs)
        return self.value

    def _evaluate(self, *args, context=None, **kwargs):
        """
        Inheriting nodes should implement this function to evaluate the node.
        """
        return self(*args, context, **kwargs)
    @classmethod
    def evaluate_node(cls, node, context, **kwargs):
        """
        Evaluate a node or constant given a context.
        """
        Node.evaluated_nodes += 1
        try:
            if isinstance(node, Node):
                Node._eval_stack.append(node.name)
                return node.evaluate(context, **kwargs)
            partial = functools.partial(cls.evaluate_node, context=context, **kwargs)
            if isinstance(node, tuple):
                return tuple(partial(element) for element in node)
            if isinstance(node, list):
                return [partial(element) for element in node]
            if isinstance(node, dict):
                return {partial(key): partial(value) for key, value in node.items()}
            if isinstance(node, slice):
                return slice(*[partial(getattr(node, attr)) for attr in ['start', 'stop', 'step']])
            return node
        except Exception as ex:  # pragma: no cover
            messages = []
            interactive = False
            if isinstance(node, Node) or not is_iterable(node):
                node = [node]
            for n in node:
                stack = []
                if isinstance(n, Node):
                    for frame in reversed(n._stack):  # pylint: disable=protected-access
                        # Do not capture any internal stack traces
                        fname = frame.filename
                        if 'polymath' in fname:
                            continue
                        # Stop tracing at the last interactive cell
                        if interactive and not fname.startswith('<'):
                            break  # pragma: no cover
                        interactive = fname.startswith('<')
                        stack.append(frame)
                    stack = "".join(traceback.format_list(reversed(stack)))
                    message = "Failed to evaluate node `%s` defined at:\n\n%s" % (n, stack)
                    messages.append(message)
            raise ex from EvaluationError("".join(messages))

    @classmethod
    def init_from_args(cls, *args, name=None, shape=None, graph=None,
                       dependencies=None, op_name=None, value=None, **kwargs):
        if len(args) == 0:
            n = cls(name=name, shape=shape, graph=graph, op_name=op_name,
                    dependencies=dependencies, value=value, **kwargs)
        else:
            n = cls(*args, name=name, shape=shape, graph=graph, op_name=op_name,
                    dependencies=dependencies, value=value, **kwargs)
        return n
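    # `evaluate_node` recurses structurally through containers (illustrative
    # comment): tuples, lists, dicts, and slices are rebuilt with every element
    # evaluated, so evaluating `(a, b)` yields `(value_of_a, value_of_b)` and
    # evaluating `slice(i, j)` yields `slice(value_of_i, value_of_j)`.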
    @property
    def preds(self):
        return self._predecessors

    @property
    def succs(self):
        return self._succesors

    def add_predecessor(self, pred):
        if isinstance(pred, Node):
            self._predecessors.append(pred.gname)
        else:
            self._predecessors.append(pred)

    def add_successor(self, succ):
        if isinstance(succ, Node):
            self._succesors.append(succ.gname)
        else:
            self._succesors.append(succ)

    def set_edges(self):
        for e in self.args:
            self.add_predecessor(e)
            if isinstance(e, Node):
                e.add_successor(self)

    def update_graph_key(self, old_key, new_key):
        n = list(map(lambda k: (new_key, self.nodes[k]) if k == old_key else (k, self.nodes[k]),
                     self.nodes.keys()))
        self.nodes = Graph(n)

    def insert_node(self, node, idx):
        node_list = list(self.nodes.items())
        node_list.insert(idx, (node.name, node))
        self.nodes = Graph(node_list)

    def add_attribute(self, key, value):
        self.added_attrs.append(key)
        self.kwargs[key] = value

    def __enter__(self):
        Node._graph_stack.append(self)
        return self

    def __exit__(self, *args):
        assert self == Node._graph_stack.pop()

    def __repr__(self):
        return "<node '%s'>" % self.name

    def __call__(self, *args, **kwargs):
        return call(self, *args, **kwargs)

    def __getattr__(self, name):
        # Guard against infinite recursion for private/protocol lookups.
        if name.startswith("_"):
            raise AttributeError(name)
        return getattr_(self, name, graph=self.graph)

    def __getitem__(self, key):
        if self.__class__.__name__ != "Node":
            if isinstance(key, (slice, Integral)):
                return getitem(self, key, graph=self.graph)
            elif isinstance(key, (list)):
                return var_index(self, key, graph=self)
            elif isinstance(key, tuple):
                return var_index(self, list(key), graph=self)
            else:
                return var_index(self, [key], graph=self)
        else:
            return self.nodes[key]
    def __add__(self, other):
        return add(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__radd__(self)

    def __radd__(self, other):
        return add(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__add__(self)

    def __sub__(self, other):
        return sub(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rsub__(self)

    def __rsub__(self, other):
        return sub(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__sub__(self)

    def __pow__(self, other):
        return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rpow__(self)

    def __rpow__(self, other):
        return pow_(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__pow__(self)

    def __matmul__(self, other):
        return matmul(self, other, graph=self.graph)

    def __rmatmul__(self, other):
        return matmul(other, self, graph=self.graph)

    def __mul__(self, other):
        return mul(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmul__(self)

    def __rmul__(self, other):
        return mul(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mul__(self)

    def __truediv__(self, other):
        return truediv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rtruediv__(self)

    def __rtruediv__(self, other):
        return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__truediv__(self)

    def __floordiv__(self, other):
        return floordiv(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rfloordiv__(self)

    def __rfloordiv__(self, other):
        return floordiv(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__floordiv__(self)

    def __mod__(self, other):
        return mod(self, other, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__rmod__(self)

    def __rmod__(self, other):
        return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, ("slice_op", "var_index", "index")) else other.__mod__(self)
\"\"\" return func(*args, **kwargs) @contextlib.contextmanager def control_dependencies(dependencies,", "value=value, **kwargs) return n def __bool__(self): return True def __hash__(self):", "var_index(self, list(key), graph=self) else: return var_index(self, [key], graph=self) else: return", "= frame.filename if 'polymath' in fname: continue # Stop tracing", "else other.__lshift__(self) def __rshift__(self, other): return rshift(self, other, graph=self.graph) if", "raise ValueError(f\"'{node}' is not an `Node` instance or node name\")", "return self def __exit__(self, *args): assert self == Node._graph_stack.pop() def", "Get a list of all dependencies relevant to this node", "no cover interactive = fname.startswith('<') stack.append(frame) stack = \"\".join(traceback.format_list(reversed(stack))) message", "max_ = nodeop(builtins.max) round_ = nodeop(builtins.round) delattr_ = nodeop(builtins.delattr) hash_", "for s in self.shape: if not isinstance(s, Integral): return False", "single = False else: raise ValueError(\"`fetches` must be an `Node`", "if self.__class__.__name__ == \"Node\": self.nodes[arg.name] = self.graph[arg.name] new_args.append(arg) self._args =", "\"index\")) else other.__sub__(self) def __pow__(self, other): return pow_(self, other, graph=self.graph)", "str : Operation name which describes the node functionality. \"\"\"", "__pow__(self, other): return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "0 else c.name fetches[fetches.index(c)] = c.graph.nodes[write_name] values = [fetch.evaluate_node(fetch, context,", "execution, it can be set using `value`. kwargs : dict", "= [] if dependencies is None else dependencies if self.graph:", "__repr__(self): return \"<node '%s'>\" % self.name def add_attribute(self, key, value):", "ipow = nodeop(operator.ipow) irshift = nodeop(operator.irshift) is_ = nodeop(operator.is_) is_not", "= name or uuid.uuid4().hex # TODO: Need a way to", "\"\"\" Base class for nodes. Parameters ---------- args : tuple", "self._shape = tuple([shape]) elif isinstance(shape, Node): self._shape = tuple([shape]) elif", "elif isinstance(fetches, Sequence): single = False else: raise ValueError(\"`fetches` must", "slice_op(operator.add, self, other, graph=self.graph) def __radd__(self, other): return slice_op(operator.add, other,", "__len__(self): #TODO: Update this to check for finalzied shape if", "context=context, **kwargs) if isinstance(node, tuple): return tuple(partial(element) for element in", "{self.var.shape}\\n\" f\"\\tIndex Upper bounds: {indices[-1]}\") indices = list(map(lambda x: x.tolist()", "nodeop(operator.attrgetter) concat = nodeop(operator.concat) contains = nodeop(operator.contains) countOf = nodeop(operator.countOf)", "used without arguments @functools.wraps(target) def _wrapper(*args, **kwargs_inner): return func_op(target, *args,", "Parameters ---------- context : dict Normalised context in which to", "the node name. Parameters ---------- node : Node or str", "but does not belong to this graph. \"\"\" if isinstance(node,", "index, making sure to replicate the new name with a", "pylint:disable=W0621 value = context.pop(node) node = self.instantiate_node(node) if node in", "self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rpow__(self)", "or _is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx = slice2_var.domain elif _is_node_type_instance(slice2_var, \"index\"): slice2_idx", "name which describes the node functionality. 
class EvaluationError(RuntimeError):
    """
    Failed to evaluate a node.
    """
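# Error-handling sketch (illustrative comment): evaluation failures are
# chained onto an `EvaluationError` carrying the stack trace of where the
# failing node was defined, so callers can do, e.g.:
#
#   try:
#       graph.run("y", {})
#   except Exception as ex:
#       print(ex.__cause__)   # EvaluationError with the definition site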
\"\"\" class var_index(Node): # pylint: disable=C0103,W0223", "and name in self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\"", "node callback = callback or _noop_callback with callback(self, context): if", "else np.asarray([var[idx] for idx in indices]).reshape(out_shape) if out_shape == (1,)", "in list(kwargs.keys()): if aa in self.added_attrs: kwargs.pop(aa) return self.target(*args, **kwargs)", "i in range(len(out_shape)): if out_shape[i] == 1: var = np.expand_dims(var,", "graph=self) else: return var_index(self, [key], graph=self) else: return self.nodes[key] def", "__rlshift__(self, other): return slice_op(operator.lshift, other, self, graph=self.graph) def __rshift__(self, other):", "node was defined self._stack = traceback.extract_stack(limit=1) @property def graph(self): \"\"\"", "reversed(n._stack): # pylint: disable=protected-access # Do not capture any internal", "= nodeop(operator.matmul) methodcaller = nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul =", "= {} elif not isinstance(context, Mapping): raise ValueError(\"`context` must be", "representing values of a variable corresponding to input index values.", "__ge__(self, other): return slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self): return", "\"<slice_%s '%s'>\" % (self.target.__name__, self.name) class func_op(Node): # pylint: disable=C0103,R0903", "to this node self.dependencies = [] if dependencies is None", "Callback to be evaluated when an node is evaluated. kwargs", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__floordiv__(self) def __mod__(self, other): return", "f\"dimensions for slice {self.args[0].name} with {out_shape}.\\n\" f\"Domain: {self.domain}\\n\" f\"Eval Stack:", "__lt__(self, other): return lt(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "slice2_idx = slice2_var.domain else: slice2_idx = Domain(tuple([])) return slice1_var, slice1_idx,", "import operator from collections import OrderedDict, Mapping, Sequence, deque import", "def __ge__(self, other): return slice_op(operator.ge, self, other, graph=self.graph) def __repr__(self):", "call when the node is executed. args : list Sequence", "value self.dependencies = [] self._args = [] self._predeecessors = []", "== 0: return self elif self.is_shape_finalized() and len(self.nodes) > 0:", "self, other, graph=self.graph) def __rsub__(self, other): return slice_op(operator.sub, other, self,", "unique identifier. 
    def set_shape(self, shape=None, init=False):
        s = []
        assert isinstance(shape, (tuple, list))
        if all([isinstance(sv, Integral) for sv in shape]) and len(self.domain) == np.prod(shape) and len(shape) > 0:
            self._shape = shape if isinstance(shape, tuple) else tuple(shape)
        else:
            for idx, d in enumerate(self.domain.dom_set):
                if shape and isinstance(shape[idx], float):
                    s.append(int(shape[idx]))
                elif isinstance(d, float):
                    s.append(int(d))
                elif isinstance(d, var_index):
                    s.append(d.domain)
                else:
                    s.append(d)
            self._shape = tuple(s)

    def _evaluate(self, var, indices, **kwargs):
        if self.is_scalar(var):
            out_shape = (1,)
            indices = (0,)
            single = True
        else:
            out_shape = self.domain.shape_from_indices(indices)
            # Enumerate concrete index tuples for the computed domain
            indices = self.domain.compute_pairs()
            single = False
        if isinstance(var, (Integral, Rational, Real)):
            var = np.asarray([var])
        elif not isinstance(var, (np.ndarray, list)):
            raise TypeError(f"Variable {var} with type {type(var)} is not a list or numpy array, "
                            f"and cannot be sliced for {self.name}")
        elif isinstance(var, list):
            var = np.asarray(var)

        if len(var.shape) != len(out_shape) and np.prod(var.shape) == np.prod(out_shape):
            if len(out_shape) > len(var.shape):
                for i in range(len(out_shape)):
                    if out_shape[i] == 1:
                        var = np.expand_dims(var, axis=i)
            else:
                var = np.squeeze(var)

        if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):
            raise ValueError(f"Index list does not match {var.shape} in {self.var.name} - {self.var.op_name} "
                             f"dimensions for slice {self.args[0].name} with {out_shape}.\n"
                             f"Domain: {self.domain}\n"
                             f"Eval Stack: {Node._eval_stack}")

        # Guard against out-of-bounds index tuples
        if not single and len(indices) > 0 and np.any(np.asarray(indices[-1]) >= np.asarray(var.shape)):
            raise IndexError(f"Invalid indices for {self.name} with variable shape {self.var.shape}\n"
                             f"\tIndex Upper bounds: {indices[-1]}")
        indices = list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices))
        res = var[indices] if single else np.asarray([var[idx] for idx in indices]).reshape(out_shape)
        if out_shape == (1,) and len(indices) == 1:
            res = res[0]
        self.domain.set_computed(out_shape, indices)
        return res

    def __add__(self, other):
        return slice_op(operator.add, self, other, graph=self.graph)

    def __radd__(self, other):
        return slice_op(operator.add, other, self, graph=self.graph)

    def __sub__(self, other):
        return slice_op(operator.sub, self, other, graph=self.graph)

    def __rsub__(self, other):
        return slice_op(operator.sub, other, self, graph=self.graph)

    def __pow__(self, other):
        return slice_op(builtins.pow, self, other, graph=self.graph)

    def __rpow__(self, other):
        return slice_op(builtins.pow, other, self, graph=self.graph)

    def __matmul__(self, other):
        return matmul(self, other, graph=self.graph)

    def __rmatmul__(self, other):
        return matmul(other, self, graph=self.graph)

    def __mul__(self, other):
        return slice_op(operator.mul, self, other, graph=self.graph)

    def __rmul__(self, other):
        return slice_op(operator.mul, other, self, graph=self.graph)

    def __truediv__(self, other):
        return slice_op(operator.truediv, self, other, graph=self.graph)

    def __rtruediv__(self, other):
        return slice_op(operator.truediv, other, self, graph=self.graph)

    def __floordiv__(self, other):
        return slice_op(operator.floordiv, self, other, graph=self.graph)

    def __rfloordiv__(self, other):
        return slice_op(operator.floordiv, other, self, graph=self.graph)

    def __mod__(self, other):
        return slice_op(operator.mod, self, other, graph=self.graph)

    def __rmod__(self, other):
        return slice_op(operator.mod, other, self, graph=self.graph)

    def __lshift__(self, other):
        return slice_op(operator.lshift, self, other, graph=self.graph)

    def __rlshift__(self, other):
        return slice_op(operator.lshift, other, self, graph=self.graph)

    def __rshift__(self, other):
        return slice_op(operator.rshift, self, other, graph=self.graph)

    def __rrshift__(self, other):
        return slice_op(operator.rshift, other, self, graph=self.graph)

    def __and__(self, other):
        return slice_op(operator.and_, self, other, graph=self.graph)

    def __rand__(self, other):
        return slice_op(operator.and_, other, self, graph=self.graph)

    def __or__(self, other):
        return slice_op(operator.or_, self, other, graph=self.graph)

    def __ror__(self, other):
        return slice_op(operator.or_, other, self, graph=self.graph)

    def __xor__(self, other):
        return slice_op(operator.xor, self, other, graph=self.graph)

    def __rxor__(self, other):
        return slice_op(operator.xor, other, self, graph=self.graph)

    def __lt__(self, other):
        return slice_op(operator.lt, self, other, graph=self.graph)

    def __le__(self, other):
        return slice_op(operator.le, self, other, graph=self.graph)

    def __gt__(self, other):
        return slice_op(operator.lt, other, self, graph=self.graph)

    def __ge__(self, other):
        return slice_op(operator.ge, self, other, graph=self.graph)

    def __ne__(self, other):
        return slice_op(operator.ne, self, other, graph=self.graph)

    def __repr__(self):
        return "<var_index name=%s, index=%s>" % (self.name, self.args)
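# Indexing sketch for `var_index` (illustrative comment; `w` is a hypothetical
# 2-D variable node and `i`, `j` are index nodes):
#
#   w[i, j]      # -> var_index named "w[i][j]" over the domain (i, j)
#   w[0]         # -> var_index over the single concrete index 0
#
# Evaluating the node gathers `w`'s values at every (i, j) pair and reshapes
# the result to the domain's shape.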
class slice_op(Node):
    """
    Node representing multi-dimensional operations performed on a node.

    Parameters
    ----------
    target : callable
        The operation applied across the combined domains of the input nodes.
    args : tuple
        The input nodes, whose domains consist of either integer values or
        index/index_op nodes.
    """
    def __init__(self, target, *args, **kwargs):
        if "domain" in kwargs:
            domain = tuple(kwargs.pop("domain")) if isinstance(kwargs["domain"], list) else kwargs.pop("domain")
        else:
            all_args = _flatten_iterable(args)
            slice1_var, slice1_idx, slice2_var, slice2_idx = self.get_index_nodes(all_args[0], all_args[1])
            domain = slice1_idx.combine_set_domains(slice2_idx)
        if "op_name" in kwargs:
            kwargs.pop("op_name")
        target_name = f"{target.__module__}.{target.__name__}"
        super(slice_op, self).__init__(*args, target=target_name, domain=domain,
                                       op_name=f"slice_{target.__name__}", **kwargs)
        self.target = target

    @property
    def domain(self):
        return self.kwargs["domain"]

    def __getitem__(self, key):
        if isinstance(key, (tuple, list, np.ndarray)) and len(key) == 0:
            return self
        elif self.is_shape_finalized() and len(self.nodes) > 0:
            if isinstance(key, (int, Node)):
                key = tuple([key])
            if len(key) != len(self.shape):
                raise KeyError(f"Invalid key shape for {self.name}:\n"
                               f"Shape: {self.shape}\n"
                               f"Key: {key}")
            name = f"{self.name}{key}"
            if name not in self.nodes.keys():
                raise KeyError(f"{name} not in {self.name} keys:\n"
                               f"Node keys: {list(self.nodes.keys())}")
            return self.nodes[name]
        else:
            if isinstance(key, (list)):
                ret = var_index(self.var, tuple(key), graph=self)
            elif isinstance(key, tuple):
                ret = var_index(self.var, tuple(key), graph=self)
            else:
                ret = var_index(self.var, tuple([key]), graph=self)
            return ret
    def is_scalar(self, val):
        return not isinstance(val, np.ndarray) or (len(val.shape) == 1 and val.shape[0] == 1)

    def _evaluate(self, op1, op2, context=None, **kwargs):
        if self.is_scalar(op1) or self.is_scalar(op2):
            value = self.target(op1, op2)
        else:
            arg0_dom = self.args[0].domain
            arg1_dom = self.args[1].domain
            op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([])
            op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])
            op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape)
            op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)
            value = self.target(op1, op2)
        return value

    def get_index_nodes(self, slice1_var=None, slice2_var=None):
        if slice1_var is None and slice2_var is None:
            slice1_var, slice2_var = self.args
        if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var, "GroupNode"):
            slice1_idx = slice1_var.domain
        elif _is_node_type_instance(slice1_var, "index"):
            slice1_idx = slice1_var.domain
        else:
            slice1_idx = Domain(tuple([]))
        if isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, "GroupNode"):
            slice2_idx = slice2_var.domain
        elif _is_node_type_instance(slice2_var, "index"):
            slice2_idx = slice2_var.domain
        else:
            slice2_idx = Domain(tuple([]))
        return slice1_var, slice1_idx, slice2_var, slice2_idx

    def __add__(self, other):
        return slice_op(operator.add, self, other, graph=self.graph)

    def __radd__(self, other):
        return slice_op(operator.add, other, self, graph=self.graph)

    def __sub__(self, other):
        return slice_op(operator.sub, self, other, graph=self.graph)

    def __rsub__(self, other):
        return slice_op(operator.sub, other, self, graph=self.graph)

    def __pow__(self, other):
        return slice_op(builtins.pow, self, other, graph=self.graph)

    def __rpow__(self, other):
        return slice_op(builtins.pow, other, self, graph=self.graph)

    def __matmul__(self, other):
        return matmul(self, other, graph=self.graph)

    def __rmatmul__(self, other):
        return matmul(other, self, graph=self.graph)

    def __mul__(self, other):
        return slice_op(operator.mul, self, other, graph=self.graph)

    def __rmul__(self, other):
        return slice_op(operator.mul, other, self, graph=self.graph)

    def __truediv__(self, other):
        return slice_op(operator.truediv, self, other, graph=self.graph)

    def __rtruediv__(self, other):
        return slice_op(operator.truediv, other, self, graph=self.graph)

    def __floordiv__(self, other):
        return slice_op(operator.floordiv, self, other, graph=self.graph)

    def __rfloordiv__(self, other):
        return slice_op(operator.floordiv, other, self, graph=self.graph)

    def __mod__(self, other):
        return slice_op(operator.mod, self, other, graph=self.graph)

    def __rmod__(self, other):
        return slice_op(operator.mod, other, self, graph=self.graph)

    def __lshift__(self, other):
        return slice_op(operator.lshift, self, other, graph=self.graph)

    def __rlshift__(self, other):
        return slice_op(operator.lshift, other, self, graph=self.graph)

    def __rshift__(self, other):
        return slice_op(operator.rshift, self, other, graph=self.graph)

    def __rrshift__(self, other):
        return slice_op(operator.rshift, other, self, graph=self.graph)

    def __and__(self, other):
        return slice_op(operator.and_, self, other, graph=self.graph)

    def __rand__(self, other):
        return slice_op(operator.and_, other, self, graph=self.graph)

    def __or__(self, other):
        return slice_op(operator.or_, self, other, graph=self.graph)

    def __ror__(self, other):
        return slice_op(operator.or_, other, self, graph=self.graph)

    def __xor__(self, other):
        return slice_op(operator.xor, self, other, graph=self.graph)

    def __rxor__(self, other):
        return slice_op(operator.xor, other, self, graph=self.graph)

    def __lt__(self, other):
        return slice_op(operator.lt, self, other, graph=self.graph)

    def __le__(self, other):
        return slice_op(operator.le, self, other, graph=self.graph)

    def __gt__(self, other):
        return slice_op(operator.lt, other, self, graph=self.graph)

    def __ne__(self, other):
        return slice_op(operator.ne, self, other, graph=self.graph)

    def __ge__(self, other):
        return slice_op(operator.ge, self, other, graph=self.graph)

    def __repr__(self):
        return "<slice_%s '%s'>" % (self.target.__name__, self.name)
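# Broadcasting sketch for `slice_op` (illustrative comment; `a` and `b` are
# hypothetical var_index nodes): `a + b` builds `slice_op(operator.add, a, b)`,
# whose domain is the combination of the two operand domains; at evaluation
# time each operand is gathered into the combined domain's shape before the
# target callable is applied elementwise.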
\"\"\" # TODO: Need a", "s.append(int(d)) elif isinstance(d, var_index): s.append(d.domain) else: s.append(d) self._shape = tuple(s)", "pylint: disable=W0235 if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if", "not node.is_shape_finalized(): context[node] = node.evaluate(context) for name, value in kwargs.items():", "self._args = tuple(new_args) @shape.setter def shape(self, shape): self.set_shape(shape, init=True) @graph.setter", "args): new_args = [] for arg in args: if isinstance(arg,", "multi-dimensional variable used for indexing into. idx : tuple Tuple", "= nodeop(builtins.zip) compile_ = nodeop(builtins.compile) globals_ = nodeop(builtins.globals) map_ =", "value : str Unique name of the node. Returns -------", "graph = graph or Node._graph_stack[-1] return graph def instantiate_node(self, node):", "\"\"\" return self(*args, context, **kwargs) @classmethod def evaluate_node(cls, node, context,", "`None` to use the default graph. Raises ------ ValueError If", "node = self.instantiate_node(node) if node in context: raise ValueError(f\"duplicate unequal", "or names to evaluate. context : dict or None Context", "the `_evaluate` method. name : str or None Name of", "with their values. .. note:: This function modifies the context", "def __mul__(self, other): return slice_op(operator.mul, self, other, graph=self.graph) def __rmul__(self,", "associated graph. \"\"\" name = name or uuid.uuid4().hex # TODO:", "either integer values or index/index_op nodes. \"\"\" def __init__(self, target,", "= nodeop(builtins.input) oct_ = nodeop(builtins.oct) staticmethod_ = nodeop(builtins.staticmethod) bin_ =", "in [\"output\", \"state\", \"temp\"]: write_name = \"/\".join([f\"{i}{c.write_count-1}\" for i in", "_flatten_iterable, node_hash, \\ _is_node_type_instance, is_iterable class Node(object): \"\"\" Base class", "equal to the current ndoe as ewll if self.graph and", "in shape: if isinstance(dim, (Node, Integral)): shapes.append(dim) elif isinstance(dim, float):", "return values[0] if single else tuple(values) def __getstate__(self): return self.__dict__", "shape or tuple([]) # Get a list of all dependencies", "break # pragma: no cover interactive = fname.startswith('<') stack.append(frame) stack", "self.shape[0] def __iter__(self): num = len(self) for i in range(num):", "nodeop(builtins.repr) zip_ = nodeop(builtins.zip) compile_ = nodeop(builtins.compile) globals_ = nodeop(builtins.globals)", "f\"{fnc.__module__}.{fnc.__name__}\" def __getitem__(self, key): return self @property def domain(self): return", "graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield # Remove dependencies from", "else other.__rand__(self) def __rand__(self, other): return and_(other, self, graph=self.graph) if", "\"index\")) else other.__and__(self) def __or__(self, other): return or_(self, other, graph=self.graph)", "Node): self._predecessors.append(pred.gname) else: self._predecessors.append(pred) def add_successor(self, succ): if isinstance(succ, Node):", "nodeop(builtins.divmod) id_ = nodeop(builtins.id) object_ = nodeop(builtins.object) sorted_ = nodeop(builtins.sorted)", "\"temp\"] and not node.is_shape_finalized(): context[node] = node.evaluate(context) return context def", "graph=self.graph) def __or__(self, other): return slice_op(operator.or_, self, other, graph=self.graph) def", "return self.nodes[node] if isinstance(node, Node): if node.name not in self.nodes", "evaluated when an node is evaluated. 
kwargs : dict Additional", "with positional arguments `args` and keyword arguments `kwargs`. Parameters ----------", "passed to the `_evaluate` method. name : str or None", "other.__add__(self) def __sub__(self, other): return sub(self, other, graph=self.graph) if not", "and name not in g.nodes: g = g.graph if name", "kwargs.pop(aa) return self.target(*args, **kwargs) def __call__(self, *args, **kwargs): return call(self,", "the variable, index combination. Parameters ---------- value : str Unique", "import importlib from .graph import Graph from .domain import Domain", "of either integer values or index/index_op nodes. \"\"\" def __init__(self,", "nodes to be evaluted before evaluating any nodes defined in", "= nodeop(builtins.super) bytes_ = nodeop(builtins.bytes) float_ = nodeop(builtins.float) iter_ =", "other): return or_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "g.nodes: return g.nodes[name] raise RuntimeError(f\"Cannot find {name} in graph nodes.", "% self.name def add_attribute(self, key, value): self.added_attrs.append(key) self.kwargs[key] = value", "var index {self} with variable shape {self.var.shape}\") return True else:", "op_name elif self.__class__.__name__ == \"Node\": self._op_name = self.name else: self._op_name", "ret = self.nodes.item_by_index(idx) return ret else: if isinstance(key, (list)): ret", "not isinstance(var, (np.ndarray, list)): raise TypeError(f\"Variable {var} with type {type(var)}", "# Evaluate the node callback = callback or _noop_callback with", "= np.asarray([var]) elif not isinstance(var, (np.ndarray, list)): raise TypeError(f\"Variable {var}", "0: return self elif self.is_shape_finalized() and len(self.nodes) > 0: if", "constant given a context. \"\"\" Node.evaluated_nodes += 1 try: if", "nodeop(builtins.bytes) float_ = nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ = nodeop(builtins.print)", "name in self.graph.nodes: return self.graph.nodes[name] elif isinstance(key, (list)): return var_index(self,", "a node. This can be a tuple of integers or", "self._shape = tuple(shapes) @staticmethod def get_active_graph(graph=None): \"\"\" Obtain the currently", "def __and__(self, other): return slice_op(operator.and_, self, other, graph=self.graph) def __rand__(self,", "*args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs): self.nodes =", "len(out_shape) and np.prod(var.shape) != np.prod(out_shape): raise ValueError(f\"Index list does not", "in self.args: self.add_predecessor(e) if isinstance(e, Node): e.add_successor(self) @property def domain(self):", "1: var = np.expand_dims(var, axis=i) else: var = np.squeeze(var) if", ": list Sequence of nodes to be evaluted before evaluating", "slice_op(operator.sub, self, other, graph=self.graph) def __rsub__(self, other): return slice_op(operator.sub, other,", "node names. \"\"\" return self._shape @property def var(self): return self", "this node. 
\"\"\" return tuple(self._args) @property def argnames(self): return [a.name", "= Node.get_active_graph(graph) @property def gname(self): scope_names = [self.name] cgraph =", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rshift__(self) def __and__(self, other):", "(\"slice_op\", \"var_index\", \"index\")) else other.__or__(self) def __xor__(self, other): return xor(self,", "= nodeop(builtins.range) vars_ = nodeop(builtins.vars) classmethod_ = nodeop(builtins.classmethod) getattr_ =", "None and (not isinstance(val, np.ndarray) or (len(val.shape) == 1 and", "(len(val.shape) == 1 and val.shape[0] == 1) def _evaluate(self, op1,", "graph=self.graph) def __rpow__(self, other): return slice_op(builtins.pow, other, self, graph=self.graph) def", "if val is not None and (not isinstance(val, np.ndarray) or", ": list Sequence of positional arguments passed to `func`. kwargs", "nodeop(operator.delitem) eq = nodeop(operator.eq) floordiv = nodeop(operator.floordiv) ge = nodeop(operator.ge)", "__rrshift__(self, other): return slice_op(operator.rshift, other, self, graph=self.graph) def __and__(self, other):", "{key}\") name = f\"{self.name}{key}\" if name not in self.nodes.keys(): raise", "in context: if c in fetches and c.op_name in [\"output\",", "@functools.wraps(target) def _wrapper(*args, **kwargs_inner): return func_op(target, *args, **kwargs_inner, **kwargs) return", "Evaluate one or more nodes given a dictionary of node", "(\"slice_op\", \"var_index\", \"index\")) else other.__lshift__(self) def __rshift__(self, other): return rshift(self,", ": str Operation name which describes the node functionality. value", ": list[str or Node] or str or Node One or", "nodeop(builtins.compile) globals_ = nodeop(builtins.globals) map_ = nodeop(builtins.map) reversed_ = nodeop(builtins.reversed)", "other): return and_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "= nodeop(operator.not_) or_ = nodeop(operator.or_) pos = nodeop(operator.pos) rshift =", "if c in fetches and c.op_name in [\"output\", \"state\", \"temp\"]:", "values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches] return", "if self._name is not None and self._name in graph.nodes: graph.update_graph_key(self._name,", "return slice_op(operator.or_, self, other, graph=self.graph) def __ror__(self, other): return slice_op(operator.or_,", "= nodeop(operator.neg) not_ = nodeop(operator.not_) or_ = nodeop(operator.or_) pos =", "self._shape = tuple([np.int(shape)]) elif isinstance(shape, Integral): self._shape = tuple([shape]) elif", "return self.value def _evaluate(self, *args, context=None, **kwargs): \"\"\" Inheriting nodes", "= nodeop(operator.pos) rshift = nodeop(operator.rshift) setitem = nodeop(operator.setitem) sub =", "def domain(self): return self.kwargs[\"domain\"] def __getitem__(self, key): if isinstance(key, (tuple,", "nodeop(builtins.bytearray) filter_ = nodeop(builtins.filter) issubclass_ = nodeop(builtins.issubclass) pow_ = nodeop(builtins.pow)", "uuid.uuid4().hex # TODO: Need a way to check if the", "__rmod__(self, other): return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "\"index\")) else other.__le__(self) def __invert__(self): return inv(self, graph=self.graph) def __neg__(self):", "return floordiv(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "@property def domain(self): return 
self.kwargs[\"domain\"] @property def var(self): var, index_list", "= op_name # Get the stack context so we can", "dependencies to the graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies) yield #", "node) if isinstance(node, list): return [partial(element) for element in node]", "\"\"\" return self._shape @property def var(self): return self @property def", "indexOf = nodeop(operator.indexOf) inv = nodeop(operator.inv) invert = nodeop(operator.invert) ior", "slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) else: domain =", "self.nodes = Graph(n) def insert_node(self, node, idx): node_list = list(self.nodes.items())", "elif isinstance(key, tuple): ret = var_index(self.var, key, graph=self) else: ret", "np.asarray([var]) elif not isinstance(var, (np.ndarray, list)): raise TypeError(f\"Variable {var} with", "= list(self.nodes.items()) node_list.insert(idx, (node.name, node)) self.nodes = Graph(node_list) def __call__(self,", "`Node` instance or an node name. RuntimeError If `node` is", "Operation name which describes the node functionality. value : Any", "match {var.shape} in {self.var.name} - {self.var.op_name}\" f\"dimensions for slice {self.args[0].name}", "# TODO: Need a way to check if the existing", "a unique stringwhich corresponds to the variable, index combination. Parameters", "else other.__rlshift__(self) def __rlshift__(self, other): return lshift(other, self, graph=self.graph) if", "self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) if \"op_name\" in kwargs: kwargs.pop(\"op_name\")", "evaluating any nodes defined in this scope. \"\"\" # Add", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__ge__(self) def __ne__(self,", "if isinstance(shape, tuple) else tuple(shape) else: for idx, d in", "self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\" f\"Existing: {self.graph.nodes[name].args}\\n\" f\"New:", "in self.graph.nodes: return self.graph.nodes[name] elif isinstance(key, (list)): return var_index(self, key,", "= list(context) # Add the keyword arguments for node in", "the target kwargs : dict keywoard arguments passed to the", "Node): self._shape = tuple([shape]) elif not shape or len(shape) ==", "dependencies=dependencies, value=value, **kwargs) else: n = cls(*args, name=name, shape=shape, graph=graph,", "key = tuple([key]) if len(key) != len(self.shape): raise KeyError(f\"Invalid key", "= g.graph if name in g.nodes: return g.nodes[name] raise RuntimeError(f\"Cannot", "ValueError(f\"duplicate name '{name}' in {self.graph.name}:\\n\\t\" f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\") if", "other): return le(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "isinstance(node, slice): return slice(*[partial(getattr(node, attr)) for attr in ['start', 'stop',", "== Node._graph_stack.pop() def __repr__(self): return \"<node '%s'>\" % self.name def", "return var_index(self, list(key), graph=self) else: return var_index(self, [key], graph=self) else:", "= shape or tuple([]) # Get a list of all", "op2, context=None, **kwargs): if self.is_scalar(op1) or self.is_scalar(op2): value = self.target(op1,", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rand__(self)", "the associated graph. 
\"\"\" # TODO: Need a way to", "Node): name.append(k.name) else: name.append(str(k)) else: name.append(key) name = self.var.name +", "slice2_var=None): if slice1_var is None and slice2_var is None: slice1_var,", "# Add dependencies to the graph graph = Node.get_active_graph(graph) graph.dependencies.extend(dependencies)", "other): return slice_op(operator.mod, other, self, graph=self.graph) def __lshift__(self, other): return", "nodeop(builtins.filter) issubclass_ = nodeop(builtins.issubclass) pow_ = nodeop(builtins.pow) super_ = nodeop(builtins.super)", "can report where the node was defined self._stack = traceback.extract_stack(limit=1)", "= [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches] return values[0]", "(new_key, self.nodes[k]) if k == old_key else (k, self.nodes[k]), self.nodes.keys()))", "f\"Existing: {self.graph.nodes[name].args}\\n\\t\" f\"New: {self.args}\") if self.graph: graph = self.graph if", "\"var_index\", \"index\")) else other.__rsub__(self) def __rsub__(self, other): return sub(other, self,", "graph=None, dependencies=None, op_name=None, value=None, **kwargs): self.nodes = Graph() self.value =", "nodeop(operator.le) length_hint = nodeop(operator.length_hint) lshift = nodeop(operator.lshift) lt = nodeop(operator.lt)", "f\"Node keys: {list(self.nodes.keys())}\") ret = self.nodes[name] return ret else: name", "def graph(self, graph): self._graph = Node.get_active_graph(graph) @property def gname(self): scope_names", "explicit dependencies first self.evaluate_dependencies(context, callback) if self in context: return", "op_name=None, value=None, **kwargs): self.nodes = Graph() self.value = value self.dependencies", "hasattr(key, \"__len__\") and not isinstance(key, str): for k in key:", "isinstance(slice2_var, (slice_op, var_index)) or _is_node_type_instance(slice2_var, \"GroupNode\"): slice2_idx = slice2_var.domain elif", "is not a mapping. 
\"\"\" if context is None: context", "\"index\")) else other.__rsub__(self) def __rsub__(self, other): return sub(other, self, graph=self.graph)", "name, value in kwargs.items(): node = self.nodes[name] if node in", "return truediv(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "tuple): return var_index(self, list(key), name=name, graph=self.graph) else: return var_index(self, [key],", "self.added_attrs} # Evaluate the node callback = callback or _noop_callback", "self, graph=self.graph) def __mod__(self, other): return slice_op(operator.mod, self, other, graph=self.graph)", "other): return slice_op(operator.gt, self, other, graph=self.graph) def __ge__(self, other): return", "elif self.__class__.__name__ == \"Node\": self._op_name = self.name else: self._op_name =", "target=f\"{target.__module__}.{target.__name__}\", domain=domain, **kwargs) self.target = target self.added_attrs += [\"domain\", \"target\"]", "\"\"\" class var_index(Node): # pylint: disable=C0103,W0223 \"\"\" Node representing values", "= nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ = nodeop(builtins.print) tuple_ =", "def evaluate_dependencies(self, context, callback=None): \"\"\" Evaluate the dependencies of this", "for element in node) if isinstance(node, list): return [partial(element) for", "else other.__or__(self) def __xor__(self, other): return xor(self, other, graph=self.graph) if", "gname(self): scope_names = [self.name] cgraph = self.graph while cgraph: scope_names.append(cgraph.name)", "graph(self, graph): self._graph = Node.get_active_graph(graph) @property def gname(self): scope_names =", "= cls(name=name, shape=shape, graph=graph, op_name=op_name, dependencies=dependencies, value=value, **kwargs) else: n", "out_shape == (1,) and len(indices) == 1: res = res[0]", "or str or Node One or more `Node` instances or", "nodeop(builtins.float) iter_ = nodeop(builtins.iter) print_ = nodeop(builtins.print) tuple_ = nodeop(builtins.tuple)", "slice2_var, slice2_idx def __add__(self, other): return slice_op(operator.add, self, other, graph=self.graph)", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rpow__(self) def __matmul__(self, other): return", "(np.ndarray, list)): raise TypeError(f\"Variable {var} with type {type(var)} is not", "graph del graph.dependencies[-len(dependencies):] #pylint: disable=C0103 abs_ = nodeop(builtins.abs) dict_ =", "in self.nodes and (node.graph != self): raise RuntimeError(f\"node '{node}' does", "graph=self.graph) def __le__(self, other): return slice_op(operator.lt, other, self, graph=self.graph) def", "return or `None` to use the default graph. Raises ------", "= fname.startswith('<') stack.append(frame) stack = \"\".join(traceback.format_list(reversed(stack))) message = \"Failed to", "key, graph=self.graph) else: if isinstance(key, (list)): return var_index(self, key, graph=self)", "(Integral, Real, str)): var = np.asarray([var]) elif not isinstance(var, (np.ndarray,", "order to enable \"is shape finalized\" to work self._shape =", "of the node. 
Returns ------- self : Node This node.", "other): return slice_op(operator.floordiv, other, self, graph=self.graph) def __mod__(self, other): return", "Node): e.add_successor(self) @property def domain(self): return Domain(tuple([])) @property def args(self):", "idx, idx_val in enumerate(var.shape)]): raise ValueError(f\"var_index {self.name} has indices which", "*args, **kwargs): for aa in list(kwargs.keys()): if aa in self.added_attrs:", "evaluate the node. callback : callable or None Callback to", "in kwargs: kwargs.pop(\"op_name\") target_name = f\"{target.__module__}.{target.__name__}\" super(slice_op, self).__init__(*args, target=target_name, domain=domain,", "operations performed on a node. Parameters ---------- target : cal", "nodeop(builtins.input) oct_ = nodeop(builtins.oct) staticmethod_ = nodeop(builtins.staticmethod) bin_ = nodeop(builtins.bin)", "__ge__(self, other): return ge(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "`func`. kwargs : dict Mapping of keyword arguments passed to", "other, graph=self.graph) def __rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph)", "If graph is `None`, this is the top-level graph. \"\"\"", "node_list = list(self.nodes.items()) node_list.insert(idx, (node.name, node)) self.nodes = Graph(node_list) def", "if context is None: context = {} elif not isinstance(context,", "value=None, **kwargs): self.nodes = Graph() self.value = value self.dependencies =", "return [a.name if isinstance(a, Node) else a for a in", "integer values or index/index_op nodes. \"\"\" def __init__(self, var, idx,", "instantiate_node(self, node): # pylint:disable=W0621 \"\"\" Instantiate nodes by retrieving the", "value=value, **kwargs) else: n = cls(*args, name=name, shape=shape, graph=graph, op_name=op_name,", "\"\"\" Node representing multi-dimensional operations performed on a node. Parameters", "Node.evaluated_nodes += 1 try: if isinstance(node, Node): Node._eval_stack.append(node.name) return node.evaluate(context,", "np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape) op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape)", "if isinstance(shape, float): self._shape = tuple([np.int(shape)]) elif isinstance(shape, Integral): self._shape", "self._succesors = [] self.args = args if \"name\" in kwargs:", "Shape of the output for a node. 
This can be", "else: return self.var.shape == DEFAULT_SHAPES[0] def _evaluate(self, var, indices, **kwargs):", "rshift(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else", "cannot be sliced for {self.name}\") elif isinstance(var, list): var =", "def __getattr__(self, name): return getattr_(self, name, graph=self.graph) def __getitem__(self, key):", "**kwargs) partial = functools.partial(cls.evaluate_node, context=context, **kwargs) if isinstance(node, tuple): return", "== 0: n = cls(name=name, shape=shape, graph=graph, op_name=op_name, dependencies=dependencies, value=value,", "context, **kwargs): \"\"\" Instantiate a graph by replacing all node", "frozenset_ = nodeop(builtins.frozenset) list_ = nodeop(builtins.list) range_ = nodeop(builtins.range) vars_", "contains = nodeop(operator.contains) countOf = nodeop(operator.countOf) delitem = nodeop(operator.delitem) eq", "name=%s, index=%s>\" % (self.name, self.args) class slice_op(Node): \"\"\" Node representing", "domain = slice1_idx.combine_set_domains(slice2_idx) else: domain = Domain(tuple([])) self._target = None", "if self.__class__.__name__ != \"Node\": if isinstance(key, (slice, Integral)): return getitem(self,", "else: out_shape = self.domain.shape_from_indices(indices) indices = self.domain.compute_pairs() single = False", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rmod__(self) def __rmod__(self, other): return", "None Callback to be evaluated when an node is evaluated.", "else other.__radd__(self) def __radd__(self, other): return add(other, self, graph=self.graph) if", "= nodeop(operator.length_hint) lshift = nodeop(operator.lshift) lt = nodeop(operator.lt) matmul =", "not an `Node` instance, node name, or a sequence thereof.", "op2 = np.asarray(list(map(lambda x: op2[x], op2_idx))).reshape(self.domain.computed_shape) value = self.target(op1, op2)", "or None Name of the node or `None` to use", "self.graph and name in self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in", "= graph self._shape = OrderedDict() self.shape = shape or tuple([])", "else other.__rpow__(self) def __rpow__(self, other): return pow_(other, self, graph=self.graph) if", "= [] if isinstance(key, Node): name.append(key.name) elif hasattr(key, \"__len__\") and", "**kwargs) @contextlib.contextmanager def control_dependencies(dependencies, graph=None): \"\"\" Ensure that all `dependencies`", "Domain(idx) super(var_index, self).__init__(var, idx, name=name, domain=domain, **kwargs) @property def domain(self):", "graph=self.graph) def __rsub__(self, other): return slice_op(operator.sub, other, self, graph=self.graph) def", "node : Node or str Node instance or name of", "return self def __getitem__(self, key): if self.is_shape_finalized() and len(self.nodes) >=", "return var_index(self, list(key), name=name, graph=self.graph) else: return var_index(self, [key], name=name,", "other): return sub(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\",", "self.domain.set_computed(out_shape, indices) return res def __add__(self, other): return slice_op(operator.add, self,", "which are used for executing this node. \"\"\" return tuple(self._args)", "\"var_index\", \"index\")) else other.__ne__(self) def __gt__(self, other): return gt(self, other,", "to evaluate the node. 
callback : callable or None Callback", "belong to {self} graph, instead belongs to\" f\" {node.graph}\") return", "context: raise ValueError(f\"duplicate unequal value for node '{node}'\") context[node] =", "single = True elif isinstance(fetches, Sequence): single = False else:", "in ['start', 'stop', 'step']]) return node except Exception as ex:", "other): return hash(self) == hash(other) def __getattr__(self, name): return getattr_(self,", "self.nodes[arg.name] = self.graph[arg.name] new_args.append(arg) self._args = tuple(new_args) @shape.setter def shape(self,", "__repr__(self): return \"<slice_%s '%s'>\" % (self.target.__name__, self.name) class func_op(Node): #", "which to evaluate the nodes. callback : callable or None", "@name.setter def name(self, name): self.set_name(name) @args.setter def args(self, args): new_args", "tuple): return var_index(self, list(key), graph=self) else: return var_index(self, [key], graph=self)", "__bool__(self): return True def __hash__(self): return id(self) def func_hash(self): \"\"\"", "hash of a particular node. The default hash returns an", "or uuid.uuid4().hex self._op_name = None self.op_name = op_name # Get", "the associated graph. KeyError If the current name of the", "return matmul(self, other, graph=self.graph) def __rmatmul__(self, other): return matmul(other, self,", "def __le__(self, other): return le(self, other, graph=self.graph) if not _is_node_type_instance(other,", "function modifies the context in place. Use :code:`context=context.copy()` to avoid", "= nodeop(builtins.exec) isinstance_ = nodeop(builtins.isinstance) ord_ = nodeop(builtins.ord) sum_ =", "= nodeop(builtins.max) round_ = nodeop(builtins.round) delattr_ = nodeop(builtins.delattr) hash_ =", "def instantiate_node(self, node): # pylint:disable=W0621 \"\"\" Instantiate nodes by retrieving", "*args, **kwargs): if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if", "pragma: no cover interactive = fname.startswith('<') stack.append(frame) stack = \"\".join(traceback.format_list(reversed(stack)))", "elif len(args) == 2: all_args = _flatten_iterable(args) slice1_var, slice1_idx, slice2_var,", "This is called when the decorator is used without arguments", "return slice_op(builtins.pow, other, self, graph=self.graph) def __mul__(self, other): return slice_op(operator.mul,", "graph=self.graph) def __reversed__(self): return reversed_(self, graph=self.graph) def update_graph_key(self, old_key, new_key):", "Unique name of the node\"\"\" return self._name @property def op_name(self):", "evaluted before evaluating any nodes defined in this scope. \"\"\"", "node instances or names. kwargs : dict[str, object] Additional context", "set_name(self, name): \"\"\" Set the name of the node and", "tuple Tuple of either integer values or index/index_op nodes. \"\"\"", "= nodeop(operator.is_) is_not = nodeop(operator.is_not) itemgetter = nodeop(operator.itemgetter) le =", "name): self.set_name(name) @args.setter def args(self, args): new_args = [] for", "node functionality. 
\"\"\" return self._op_name @op_name.setter def op_name(self, op_name): if", "else f\"{target.__name__}\" if \"domain\" in kwargs: domain = tuple(kwargs.pop(\"domain\")) if", "integer value for {self.name}\\n\" f\"\\tDim: {dim}\" f\"\\n\\t{self.kwargs} \") self._shape =", "in range(num): yield self[i] def __eq__(self, other): return hash(self) ==", "old_key, new_key): n = list(map(lambda k: (new_key, self.nodes[k]) if k", ": tuple Tuple of either integer values or index/index_op nodes.", "other): return slice_op(operator.add, other, self, graph=self.graph) def __sub__(self, other): return", "in enumerate(var.shape)]): raise ValueError(f\"var_index {self.name} has indices which are greater", "in fname: continue # Stop tracing at the last interactive", "Node): if self.__class__.__name__ == \"Node\": self.nodes[arg.name] = self.graph[arg.name] new_args.append(arg) self._args", "return slice_op(operator.add, other, self, graph=self.graph) def __sub__(self, other): return slice_op(operator.sub,", "index combination. Parameters ---------- value : str Unique name of", "node. \"\"\" class var_index(Node): # pylint: disable=C0103,W0223 \"\"\" Node representing", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__le__(self) def __invert__(self):", "keys: {list(self.nodes.keys())}\") ret = self.nodes[name] return ret else: name =", "@property def domain(self): return self.kwargs[\"domain\"] def __getitem__(self, key): if isinstance(key,", "self.graph: graph = self.graph if self._name is not None and", "self, graph=self.graph) def __ne__(self, other): return slice_op(operator.ne, self, other, graph=self.graph)", "`None` to use a random, unique identifier. shape : tuple", "g.graph if name in g.nodes: return g.nodes[name] raise RuntimeError(f\"Cannot find", "node or `None` to use a random, unique identifier. shape", "in which to evaluate the node. callback : callable or", "Raises ------ ValueError If no `Graph` instance can be obtained.", "hash(other) def __getattr__(self, name): return getattr_(self, name, graph=self.graph) def __getitem__(self,", "def __rsub__(self, other): return slice_op(operator.sub, other, self, graph=self.graph) def __pow__(self,", "This is called when the decorator is used with arguments", "func(*args, **kwargs) @contextlib.contextmanager def control_dependencies(dependencies, graph=None): \"\"\" Ensure that all", "nodeop(builtins.zip) compile_ = nodeop(builtins.compile) globals_ = nodeop(builtins.globals) map_ = nodeop(builtins.map)", "nodeop(operator.methodcaller) mod = nodeop(operator.mod) mul = nodeop(operator.mul) ne = nodeop(operator.ne)", "and name in self.graph.nodes: raise ValueError(f\"duplicate name '{name}' in {self.graph.name}:\\n\\t\"", "[] if isinstance(n, Node): for frame in reversed(n._stack): # pylint:", "graph=self.graph) def __repr__(self): return \"<slice_%s '%s'>\" % (self.target.__name__, self.name) class", "nodeop(operator.ne) neg = nodeop(operator.neg) not_ = nodeop(operator.not_) or_ = nodeop(operator.or_)", "= np.squeeze(var) if len(var.shape) != len(out_shape) and np.prod(var.shape) != np.prod(out_shape):", "None Parent graph of this node. If graph is `None`,", "is None: context = {} elif not isinstance(context, Mapping): raise", "parameter node names. 
\"\"\" return self._shape @property def var(self): return", "self.domain.shape_from_indices(indices) indices = self.domain.compute_pairs() single = False if isinstance(var, (Integral,", "other, graph=self.graph) def __rpow__(self, other): return slice_op(builtins.pow, other, self, graph=self.graph)", "node self.dependencies = [] if dependencies is None else dependencies", "(\"slice_op\", \"var_index\", \"index\")) else other.__xor__(self) def __lt__(self, other): return lt(self,", "__and__(self, other): return slice_op(operator.and_, self, other, graph=self.graph) def __rand__(self, other):", "arguments passed to `func`. kwargs : dict Mapping of keyword", "Context whose keys are node instances or names. kwargs :", "sum_ = nodeop(builtins.sum) bytearray_ = nodeop(builtins.bytearray) filter_ = nodeop(builtins.filter) issubclass_", "\"var_index\", \"index\")) else other.__mod__(self) def __lshift__(self, other): return lshift(self, other,", "= functools.partial(cls.evaluate_node, context=context, **kwargs) if isinstance(node, tuple): return tuple(partial(element) for", "Any or None If a node has a default value", "else: var = np.squeeze(var) if len(var.shape) != len(out_shape) and np.prod(var.shape)", "inv = nodeop(operator.inv) invert = nodeop(operator.invert) ior = nodeop(operator.ior) ipow", "= cls(*args, name=name, shape=shape, graph=graph, op_name=op_name, dependencies=dependencies, value=value, **kwargs) return", "self.__class__.__name__ == \"Node\": context[self] = self.value = self._evaluate(*args, context=context, **kwargs)", "var_index): s.append(d.domain) else: s.append(d) self._shape = tuple(s) def is_scalar(self, val):", "tuple : Positional arguments which are used for executing this", "else other.__rrshift__(self) def __rrshift__(self, other): return rshift(other, self, graph=self.graph) if", "return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "graph=self.graph) def __rxor__(self, other): return slice_op(operator.xor, other, self, graph=self.graph) def", "self[i] def __eq__(self, other): return hash(self) == hash(other) def __getattr__(self,", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rtruediv__(self) def __floordiv__(self,", ": dict Mapping of keyword arguments passed to `func`. \"\"\"", "scope. 
Parameters ---------- dependencies : list Sequence of nodes to", "graph = self.graph if self._name and self._name in graph.nodes: graph.update_graph_key(self._name,", "node] if isinstance(node, dict): return {partial(key): partial(value) for key, value", "other.__rxor__(self) def __rxor__(self, other): return xor(other, self, graph=self.graph) if not", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__le__(self) def __invert__(self): return inv(self,", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rfloordiv__(self) def __rfloordiv__(self, other): return", "kwargs : dict keywoard arguments passed to the target \"\"\"", "__repr__(self): return \"<func_op '%s' target=%s args=<%d items>>\" % \\ (self.name,", "nodeop(builtins.hasattr) max_ = nodeop(builtins.max) round_ = nodeop(builtins.round) delattr_ = nodeop(builtins.delattr)", "def args(self): \"\"\" tuple : Positional arguments which are used", "setitem = nodeop(operator.setitem) sub = nodeop(operator.sub) truediv = nodeop(operator.truediv) truth", "len(self.nodes) >= np.prod(self.shape): if isinstance(key, Integral): key = tuple([key]) idx", "underscore private variable self.kwargs = kwargs self.graph = graph self._shape", "input index values. Parameters ---------- var : Node The multi-dimensional", "self.is_scalar(var): out_shape = (1,) indices = (0,) single = True", "str or Node One or more `Node` instances or names", "fetch in fetches] return values[0] if single else tuple(values) def", "can be set using `value`. kwargs : dict Keyword arguments", "more `Node` instances or names to evaluate. context : dict", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__ror__(self) def __ror__(self,", "= self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([]) op2_idx = self.domain.map_sub_domain(arg1_dom)", "Mapping): raise ValueError(\"`context` must be a mapping.\") nodes = list(context)", "an node is evaluated. \"\"\" for node in self.dependencies: node.evaluate(context,", "__rshift__(self, other): return slice_op(operator.rshift, self, other, graph=self.graph) def __rrshift__(self, other):", "class func_op(Node): # pylint: disable=C0103,R0903 \"\"\" Node wrapper for stateless", "\"\"\" for node in self.dependencies: node.evaluate(context, callback) def evaluate(self, context,", "This returns the functional hash of a particular node. The", "domain=domain, **kwargs) @property def domain(self): return self.kwargs[\"domain\"] @property def var(self):", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rand__(self) def", "self): raise RuntimeError(f\"node '{node}' does not belong to {self} graph,", "from .graph import Graph from .domain import Domain from .util", "to the graph self._name = None self.name = name or", "called when the decorator is used with arguments if target", "> 0 else c.name fetches[fetches.index(c)] = c.graph.nodes[write_name] values = [fetch.evaluate_node(fetch,", "return mod(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "\"index\"): slice2_idx = slice2_var.domain else: slice2_idx = Domain(tuple([])) return slice1_var,", "dict Mapping of keyword arguments passed to `func`. 
\"\"\" return", "name for the node and add the node to the", "Node): name.append(key.name) elif hasattr(key, \"__len__\") and not isinstance(key, str): for", "_is_node_type_instance(slice1_var, \"GroupNode\"): slice1_idx = slice1_var.domain else: slice1_idx = Domain(tuple([])) if", "a way to check if the existing node is not", "isinstance(key, (slice, Integral)): return getitem(self, key, graph=self.graph) else: if isinstance(key,", "if isinstance(fetches, (str, Node)): fetches = [fetches] single = True", "Domain from .util import _noop_callback, _flatten_iterable, node_hash, \\ _is_node_type_instance, is_iterable", "len(self.nodes) > 0: if isinstance(key, (int, Node)): key = tuple([key])", "callback or _noop_callback with callback(self, context): if self.__class__.__name__ == \"Node\":", "np.ndarray)) and len(key) == 0: return self elif self.is_shape_finalized() and", "def __rpow__(self, other): return slice_op(builtins.pow, other, self, graph=self.graph) def __mul__(self,", "sequence thereof. \"\"\" if isinstance(fetches, (str, Node)): fetches = [fetches]", "(list)): ret = var_index(self.var, tuple(key), graph=self) elif isinstance(key, tuple): ret", "other, graph=self.graph) def __rxor__(self, other): return slice_op(operator.xor, other, self, graph=self.graph)", "self._graph def preds(self): return self._preds def succs(self): return self._preds def", "self.shape == UNSET_SHAPE: return False for s in self.shape: if", "other, self, graph=self.graph) def __sub__(self, other): return slice_op(operator.sub, self, other,", "representing multi-dimensional operations performed on a node. Parameters ---------- target", "other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rrshift__(self)", "Node(object): \"\"\" Base class for nodes. Parameters ---------- args :", "context in which to evaluate the node. callback : callable", "def is_scalar(self, val): return not isinstance(val, np.ndarray) or (len(val.shape) ==", "is evaluated. kwargs : dict Additional context information keyed by", "is evaluated. Returns ------- value : object Output of the", "being modified. Parameters ---------- fetches : list[str or Node] or", "`kwargs`. Parameters ---------- func : callable Function to call when", "def evaluate(self, context, callback=None): \"\"\" Evaluate the node given a", "def domain(self): return Domain(tuple([])) @property def args(self): \"\"\" tuple :", "of a particular node. The default hash returns an object", "def set_shape(self, shape=None, init=False): if isinstance(shape, float): self._shape = tuple([np.int(shape)])", "the functional hash of a particular node. The default hash", "context being modified. Parameters ---------- context : dict[Node or str,", "nodes defined in this scope. 
\"\"\" # Add dependencies to", "c.graph.nodes[write_name] values = [fetch.evaluate_node(fetch, context, callback=callback) for fetch in fetches]", "node)) self.nodes = Graph(node_list) def __call__(self, *args, **kwargs): return self.run(*args,", "shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs): if len(args) == 0:", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rtruediv__(self) def __floordiv__(self, other): return", "data): self.__dict__.update(data) def set_name(self, name): \"\"\" Set the name of", "slice2_idx = self.get_index_nodes(all_args[0], all_args[1]) domain = slice1_idx.combine_set_domains(slice2_idx) if \"op_name\" in", "else tuple([]) op1 = np.asarray(list(map(lambda x: op1[x], op1_idx))).reshape(self.domain.computed_shape) op2 =", "slice1_var, slice2_var = self.args if isinstance(slice1_var, (slice_op, var_index)) or _is_node_type_instance(slice1_var,", "def name(self): \"\"\"str : Unique name of the node\"\"\" return", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__xor__(self) def", "self, other, graph=self.graph) def __rmul__(self, other): return slice_op(operator.mul, other, self,", "node. Parameters ---------- target : cal The multi-dimensional variable used", "a dictionary of node names with their values. .. note::", "interactive cell if interactive and not fname.startswith('<'): break # pragma:", "`_evaluate` method. name : str or None Name of the", "= self.domain.compute_pairs() single = False if isinstance(var, (Integral, Real, str)):", "True def __hash__(self): return id(self) def func_hash(self): \"\"\" This returns", "Integral): key = tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret", "for idx in indices]).reshape(out_shape) if out_shape == (1,) and len(indices)", "= [] assert isinstance(shape, (tuple, list)) if all([isinstance(sv, Integral) for", "one value for any node. 
ValueError If `context` is not", "return ne(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "to replicate the new name with a unique stringwhich corresponds", "= [] if isinstance(n, Node): for frame in reversed(n._stack): #", "_is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__gt__(self) def __le__(self, other): return", "key shape for {self.name}:\\n\" f\"Shape: {self.shape}\\n\" f\"Key: {key}\") name =", "return res def __add__(self, other): return slice_op(operator.add, self, other, graph=self.graph)", "from collections import OrderedDict, Mapping, Sequence, deque import functools from", "return self.kwargs[\"domain\"] def __getitem__(self, key): if isinstance(key, (tuple, list, np.ndarray))", "in key: if isinstance(k, Node): name.append(k.name) else: name.append(str(k)) else: name.append(key)", "for name, value in kwargs.items(): node = self.nodes[name] if node", "[partial(element) for element in node] if isinstance(node, dict): return {partial(key):", "_is_node_type_instance(slice1_var, \"index\"): slice1_idx = slice1_var.domain else: slice1_idx = Domain(tuple([])) if", "context=None, *, callback=None, **kwargs): \"\"\" Evaluate one or more nodes", "list(map(lambda x: x.tolist() if isinstance(x, np.ndarray) else x, indices)) res", "{self.name}:\\n\" f\"Shape: {self.shape}\\n\" f\"Key: {key}\") name = f\"{self.name}{key}\" if name", "eq = nodeop(operator.eq) floordiv = nodeop(operator.floordiv) ge = nodeop(operator.ge) getitem", "return graph def instantiate_node(self, node): # pylint:disable=W0621 \"\"\" Instantiate nodes", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__add__(self) def", "__gt__(self, other): return gt(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\",", "= Graph(node_list) def __call__(self, *args, **kwargs): return self.run(*args, **kwargs) class", "str_ = nodeop(builtins.str) bool_ = nodeop(builtins.bool) exec_ = nodeop(builtins.exec) isinstance_", "of the node given the context. \"\"\" # Evaluate all", "context): if self.__class__.__name__ == \"Node\": context[self] = self.value = self._evaluate(*args,", "else tuple([]) op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([])", "other, graph=self.graph) def __ge__(self, other): return slice_op(operator.ge, self, other, graph=self.graph)", "self, graph=self.graph) def __mul__(self, other): return slice_op(operator.mul, self, other, graph=self.graph)", "the node\"\"\" return self._name @property def op_name(self): \"\"\" str :", "not is_iterable(node): node = [node] for n in node: stack", "new_key): n = list(map(lambda k: (new_key, self.nodes[k]) if k ==", "__rtruediv__(self, other): return slice_op(operator.truediv, other, self, graph=self.graph) def __floordiv__(self, other):", "Set the name of the node and update the graph.", "return self._op_name @op_name.setter def op_name(self, op_name): if op_name: self._op_name =", "self.graph if self._name is not None and self._name in graph.nodes:", "def __init__(self, *args, name=None, shape=None, graph=None, dependencies=None, op_name=None, value=None, **kwargs):", "graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__mul__(self) def", "subgraphs of a node. 
\"\"\" return node_hash(self) def find_node(self, name):", "= 5 evaluated_nodes = 0 def __init__(self, *args, name=None, shape=None,", "One or more `Node` instances or names to evaluate. context", "to return or `None` to use the default graph. Raises", "keys are node instances. Raises ------ ValueError If the context", "var for var index {self} with variable shape {self.var.shape}\") return", "pylint:disable=W0621 \"\"\" Instantiate nodes by retrieving the node object associated", "if isinstance(key, (list)): return var_index(self, key, graph=self) elif isinstance(key, tuple):", "and discard the values. Parameters ---------- context : dict Normalised", "self, other, graph=self.graph) def __gt__(self, other): return slice_op(operator.gt, self, other,", "key): if self.__class__.__name__ != \"Node\": if isinstance(key, (slice, Integral)): return", "= nodeop(builtins.print) tuple_ = nodeop(builtins.tuple) callable_ = nodeop(builtins.callable) format_ =", "return slice_op(operator.ne, self, other, graph=self.graph) def __gt__(self, other): return slice_op(operator.gt,", "retrieving the node object associated with the node name. Parameters", "var, indices, **kwargs): if self.is_scalar(var): out_shape = (1,) indices =", "list Sequence of positional arguments passed to `func`. kwargs :", "all `dependencies` are executed before any nodes in this scope.", "= self._evaluate(*args, **kwargs) return self.value def _evaluate(self, *args, context=None, **kwargs):", "self.args[1].domain op1_idx = self.domain.map_sub_domain(arg0_dom) if isinstance(self.args[0], Node) else tuple([]) op2_idx", "the node. callback : callable or None Callback to be", "list): return [partial(element) for element in node] if isinstance(node, dict):", "(len(self.var.shape) == 1 and not isinstance(self.var.shape[0],Node)): raise ValueError(f\"Invalid shape var", "val.shape[0] == 1) def _evaluate(self, op1, op2, context=None, **kwargs): if", "using the default graph. Parameters ---------- graph : Node or", "name return self def evaluate_dependencies(self, context, callback=None): \"\"\" Evaluate the", "return sub(other, self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "def __xor__(self, other): return xor(self, other, graph=self.graph) if not _is_node_type_instance(other,", "other.__sub__(self) def __pow__(self, other): return pow_(self, other, graph=self.graph) if not", "key = tuple([key]) idx = np.ravel_multi_index(key, dims=self.shape, order='C') ret =", "self._target = None super(func_op, self).__init__(*args, target=f\"{target.__module__}.{target.__name__}\", domain=domain, **kwargs) self.target =", "is_iterable(node): node = [node] for n in node: stack =", "= value if node.op_name in [\"placeholder\", \"state\", \"input\", \"output\", \"temp\"]", "place. Use :code:`context=context.copy()` to avoid the context being modified. 
Parameters", "= self.value = self._evaluate(*args, context=context, **kwargs) else: context[self] = self.value", "self, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__mod__(self)", "return rshift(self, other, graph=self.graph) if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\"))", "tuple([]) op2_idx = self.domain.map_sub_domain(arg1_dom) if isinstance(self.args[1], Node) else tuple([]) op1", "Domain(tuple([])) return slice1_var, slice1_idx, slice2_var, slice2_idx def __add__(self, other): return", "def nodeop(target=None, **kwargs): \"\"\" Decorator for creating nodes from functions.", "Real import contextlib import traceback import uuid import numpy as", "a name for the node and add the node to", "@staticmethod def get_active_graph(graph=None): \"\"\" Obtain the currently active graph instance", "def __pow__(self, other): return pow_(self, other, graph=self.graph) if not _is_node_type_instance(other,", "if not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__truediv__(self) def __rtruediv__(self,", "self, graph=self.graph) def __lshift__(self, other): return slice_op(operator.lshift, self, other, graph=self.graph)", "graph=self.graph) def __rrshift__(self, other): return slice_op(operator.rshift, other, self, graph=self.graph) def", "in graph nodes. Graph: {self.graph}\") def __len__(self): #TODO: Update this", "callback=callback) args = [partial(arg) for arg in self.args] kwargs =", "not _is_node_type_instance(other, (\"slice_op\", \"var_index\", \"index\")) else other.__rxor__(self) def __rxor__(self, other):", "used with arguments if target is None: return functools.partial(nodeop, **kwargs)", "= self.nodes[name] if node in context: raise ValueError(f\"duplicate value for", "isinstance(shape, (tuple, list)) if all([isinstance(sv, Integral) for sv in shape])", "hash(self) == hash(other) def __getattr__(self, name): return getattr_(self, name, graph=self.graph)", "to input index values. Parameters ---------- var : Node The" ]
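# A minimal usage sketch, assuming the sibling .graph/.domain/.util modules
# are importable and that nodes may be created outside an explicit Graph
# scope. The names `x`, `y` and `scaled` are illustrative, not part of the
# module above.
if __name__ == "__main__":
    x = func_op(operator.add, 2, 3, name="x")    # 2 + 3
    y = func_op(operator.mul, x, 10, name="y")   # (2 + 3) * 10
    assert y.evaluate({}) == 50

    @nodeop
    def scaled(value, factor=2):
        return value * factor

    assert scaled(x, factor=4).evaluate({}) == 20  # 5 * 4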
[ "directed to commercial is_star = models.BooleanField(_(\"is start\"), default=False) def __str__(self):", "hollywood else directed to commercial is_star = models.BooleanField(_(\"is start\"), default=False)", "commercial is_star = models.BooleanField(_(\"is start\"), default=False) def __str__(self): return self.name", "directed to hollywood else directed to commercial is_star = models.BooleanField(_(\"is", "here. class Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200) # if is_star", "if is_star he/she will be directed to hollywood else directed", "import models from django.utils.translation import ugettext_lazy as _ # Create", "= models.CharField(_(\"name\"), max_length=200) # if is_star he/she will be directed", "max_length=200) # if is_star he/she will be directed to hollywood", "Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200) # if is_star he/she will", "models.CharField(_(\"name\"), max_length=200) # if is_star he/she will be directed to", "models from django.utils.translation import ugettext_lazy as _ # Create your", "your models here. class Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200) #", "else directed to commercial is_star = models.BooleanField(_(\"is start\"), default=False) def", "he/she will be directed to hollywood else directed to commercial", "# Create your models here. class Actor(models.Model): name = models.CharField(_(\"name\"),", "to hollywood else directed to commercial is_star = models.BooleanField(_(\"is start\"),", "is_star he/she will be directed to hollywood else directed to", "to commercial is_star = models.BooleanField(_(\"is start\"), default=False) def __str__(self): return", "django.utils.translation import ugettext_lazy as _ # Create your models here.", "models here. class Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200) # if", "from django.utils.translation import ugettext_lazy as _ # Create your models", "as _ # Create your models here. class Actor(models.Model): name", "django.db import models from django.utils.translation import ugettext_lazy as _ #", "from django.db import models from django.utils.translation import ugettext_lazy as _", "Create your models here. class Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200)", "class Actor(models.Model): name = models.CharField(_(\"name\"), max_length=200) # if is_star he/she", "be directed to hollywood else directed to commercial is_star =", "name = models.CharField(_(\"name\"), max_length=200) # if is_star he/she will be", "ugettext_lazy as _ # Create your models here. class Actor(models.Model):", "will be directed to hollywood else directed to commercial is_star", "import ugettext_lazy as _ # Create your models here. class", "<gh_stars>0 from django.db import models from django.utils.translation import ugettext_lazy as", "_ # Create your models here. class Actor(models.Model): name =", "# if is_star he/she will be directed to hollywood else" ]
[ "'w') as null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"):", "subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\")", "in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\") if", "os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get the driver version from doxygenConfig.", "clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as null: subprocess.call([\"make\", \"html\"], stdout=null,", "null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir)", "def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as null: subprocess.call([\"doxygen\",", "\"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with", "line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main(): print(\"Generating", "C client docs. \"\"\" from __future__ import with_statement import os", "clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null)", "driver version from doxygenConfig. \"\"\" with open(\"doxygenConfig\") as f: for", "def clean_dir(dir): try: shutil.rmtree(dir) except: pass os.makedirs(dir) def gen_api(dir): clean_dir(dir)", "clean_dir(dir): try: shutil.rmtree(dir) except: pass os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\")", "os import shutil import socket import subprocess import time import", "\"\"\" with open(\"doxygenConfig\") as f: for line in f.readlines(): if", "line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx docs in docs/html\")", "import subprocess import time import urllib2 def clean_dir(dir): try: shutil.rmtree(dir)", "with open(os.devnull, 'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\",", "'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def", "if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx docs in", "dir) def version(): \"\"\"Get the driver version from doxygenConfig. \"\"\"", "stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get the", "def version(): \"\"\"Get the driver version from doxygenConfig. \"\"\" with", "with open(\"doxygenConfig\") as f: for line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"):", "with open(os.devnull, 'w') as null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\")", "urllib2 def clean_dir(dir): try: shutil.rmtree(dir) except: pass os.makedirs(dir) def gen_api(dir):", "the C client docs. 
\"\"\" from __future__ import with_statement import", "os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as null:", "def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as null: subprocess.call([\"make\",", "open(os.devnull, 'w') as null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if", "os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get the driver version from", "version(): \"\"\"Get the driver version from doxygenConfig. \"\"\" with open(\"doxygenConfig\")", "open(os.devnull, 'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir)", "gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"],", "os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as", "dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as null:", "main(): print(\"Generating Sphinx docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs", "time import urllib2 def clean_dir(dir): try: shutil.rmtree(dir) except: pass os.makedirs(dir)", "from __future__ import with_statement import os import shutil import socket", "def main(): print(\"Generating Sphinx docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen", "for line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main():", "print(\"Generating Sphinx docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in", "with_statement import os import shutil import socket import subprocess import", "f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx docs", "doxygenConfig. \"\"\" with open(\"doxygenConfig\") as f: for line in f.readlines():", "socket import subprocess import time import urllib2 def clean_dir(dir): try:", "docs. \"\"\" from __future__ import with_statement import os import shutil", "stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w')", "gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\") if __name__ ==", "as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir):", "\"\"\"Get the driver version from doxygenConfig. \"\"\" with open(\"doxygenConfig\") as", "as f: for line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip()", "shutil import socket import subprocess import time import urllib2 def", "import socket import subprocess import time import urllib2 def clean_dir(dir):", "version from doxygenConfig. 
\"\"\" with open(\"doxygenConfig\") as f: for line", "shutil.rmtree(dir) except: pass os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull,", "f: for line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def", "docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\") if __name__", "os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null)", "subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def", "client docs. \"\"\" from __future__ import with_statement import os import", "null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir)", "try: shutil.rmtree(dir) except: pass os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with", "line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating", "pass os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as", "import os import shutil import socket import subprocess import time", "except: pass os.makedirs(dir) def gen_api(dir): clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w')", "return line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx docs in docs/html\") gen_sphinx(\"docs/html\")", "subprocess import time import urllib2 def clean_dir(dir): try: shutil.rmtree(dir) except:", "from doxygenConfig. \"\"\" with open(\"doxygenConfig\") as f: for line in", "\"\"\" from __future__ import with_statement import os import shutil import", "docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\")", "import with_statement import os import shutil import socket import subprocess", "\"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version():", "stdout=null, stderr=null) os.rename(\"docs/source/doxygen/html\", dir) def gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull,", "import urllib2 def clean_dir(dir): try: shutil.rmtree(dir) except: pass os.makedirs(dir) def", "print(\"Generating Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\") if __name__ == \"__main__\":", "import shutil import socket import subprocess import time import urllib2", "os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get the driver", "open(\"doxygenConfig\") as f: for line in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return", "\"\"\"Build the C client docs. 
\"\"\" from __future__ import with_statement", "if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get the driver version", "Sphinx docs in docs/html\") gen_sphinx(\"docs/html\") print(\"Generating Doxygen docs in docs/html/api\")", "Doxygen docs in docs/html/api\") gen_api(\"docs/html/api\") if __name__ == \"__main__\": main()", "import time import urllib2 def clean_dir(dir): try: shutil.rmtree(dir) except: pass", "clean_dir(dir) clean_dir(\"docs/source/doxygen\") with open(os.devnull, 'w') as null: subprocess.call([\"doxygen\", \"doxygenConfig\"], stdout=null,", "__future__ import with_statement import os import shutil import socket import", "as null: subprocess.call([\"make\", \"html\"], stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\",", "gen_sphinx(dir): clean_dir(dir) os.chdir(\"docs/source/sphinx\") with open(os.devnull, 'w') as null: subprocess.call([\"make\", \"html\"],", "the driver version from doxygenConfig. \"\"\" with open(\"doxygenConfig\") as f:", "stdout=null, stderr=null) os.chdir(\"../../../\") if os.path.isdir(\"docs/source/sphinx/build/html\"): os.rename(\"docs/source/sphinx/build/html\", dir) def version(): \"\"\"Get", "in f.readlines(): if line.startswith(\"PROJECT_NUMBER\"): return line.split(\"=\")[1].strip() def main(): print(\"Generating Sphinx" ]
[ "glm.mat4(1), -self.rotation_deg / 180 * glm.pi(), glm.vec3(0, 0, 1) )", "__exit__(self, exc_type, exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree: float) -> glm.mat3:", "0)) return m def transformation_matrix(self) -> glm.mat3: m = rotation_matrix_2d(self.rotation_deg)", "glm.vec3(0) self._stack = [] def projection_matrix_4(self) -> glm.mat4: scale =", "\"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), }) def pop(self): s = self._stack.pop(-1)", "180 * glm.pi(), glm.vec3(0, 0, 1) ) m = m", "= math.sin(a) ca = math.cos(a) return glm.mat3( ca, sa, 0,", "/ self.rs.render_height m = glm.ortho(-scale * ratio, scale * ratio,", "rs self.scale = 10. self.rotation_deg = 0. self.location = glm.vec3(0)", "= 0. self.location = glm.vec3(0) self._stack = [] def projection_matrix_4(self)", "m = glm.ortho(-scale * ratio, scale * ratio, -scale, scale,", "ratio, -scale, scale, -10, 10) return m def transformation_matrix_4(self) ->", "0, 0, 0, 1 ) class GameRenderSettings(RenderSettings): def __init__(self, *args,", "math.cos(a) return glm.mat3( ca, sa, 0, -sa, ca, 0, 0,", "1) ) m = m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale))", "m = rotation_matrix_2d(self.rotation_deg) m *= self.scale * .5 m[2][0] =", "self.location = glm.vec3(0) self._stack = [] def projection_matrix_4(self) -> glm.mat4:", "= rs self.scale = 10. self.rotation_deg = 0. self.location =", "def projection_matrix_4(self) -> glm.mat4: scale = 1. ratio = self.rs.render_width", "float) -> glm.mat3: a = degree / 180. * math.pi", "scale = 1. ratio = self.rs.render_width / self.rs.render_height m =", ".5 m[2][0] = self.location.x m[2][1] = self.location.y return m def", "-self.location.y, 0)) return m def transformation_matrix(self) -> glm.mat3: m =", "m[2][0] = self.location.x m[2][1] = self.location.y return m def push(self):", "self.rs = rs self.scale = 10. self.rotation_deg = 0. self.location", "self.scale = 10. self.rotation_deg = 0. self.location = glm.vec3(0) self._stack", "self.location.y return m def push(self): self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg,", "= glm.ortho(-scale * ratio, scale * ratio, -scale, scale, -10,", "self.location.x m[2][1] = self.location.y return m def push(self): self._stack.append({ \"scale\":", "* glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return m def transformation_matrix(self) ->", "glm.mat3( ca, sa, 0, -sa, ca, 0, 0, 0, 1", "def pop(self): s = self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg =", "* glm.pi(), glm.vec3(0, 0, 1) ) m = m *", "= m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return m def", "[] def projection_matrix_4(self) -> glm.mat4: scale = 1. ratio =", "rotation_matrix_2d(self.rotation_deg) m *= self.scale * .5 m[2][0] = self.location.x m[2][1]", "def transformation_matrix_4(self) -> glm.mat4: m = glm.rotate( glm.mat4(1), -self.rotation_deg /", "= s[\"location\"] def __enter__(self): self.push() return self def __exit__(self, exc_type,", "0, 1) ) m = m * glm.scale(glm.mat4(), glm.vec3(2. 
/", "0, 1 ) class GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs): super().__init__(*args,", "transformation_matrix(self) -> glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m *= self.scale *", "m def transformation_matrix(self) -> glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m *=", "self.rs.render_height m = glm.ortho(-scale * ratio, scale * ratio, -scale,", "m def transformation_matrix_4(self) -> glm.mat4: m = glm.rotate( glm.mat4(1), -self.rotation_deg", "-> glm.mat4: m = glm.rotate( glm.mat4(1), -self.rotation_deg / 180 *", "\"location\": self.location.__copy__(), }) def pop(self): s = self._stack.pop(-1) self.scale =", "self.location.__copy__(), }) def pop(self): s = self._stack.pop(-1) self.scale = s[\"scale\"]", "\"GameRenderSettings\"): self.rs = rs self.scale = 10. self.rotation_deg = 0.", "= m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale)) m = m", "pop(self): s = self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg = s[\"rotation\"]", "/ 180. * math.pi sa = math.sin(a) ca = math.cos(a)", "ca, sa, 0, -sa, ca, 0, 0, 0, 1 )", "-> glm.mat3: a = degree / 180. * math.pi sa", "= glm.rotate( glm.mat4(1), -self.rotation_deg / 180 * glm.pi(), glm.vec3(0, 0,", "-self.rotation_deg / 180 * glm.pi(), glm.vec3(0, 0, 1) ) m", "= s[\"rotation\"] self.location = s[\"location\"] def __enter__(self): self.push() return self", "from lib.opengl import RenderSettings class GameProjection: def __init__(self, rs: \"GameRenderSettings\"):", "exc_type, exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree: float) -> glm.mat3: a", "m[2][1] = self.location.y return m def push(self): self._stack.append({ \"scale\": self.scale,", "}) def pop(self): s = self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg", "self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), }) def pop(self): s =", "def push(self): self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), })", "m = m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale)) m =", "= 1. ratio = self.rs.render_width / self.rs.render_height m = glm.ortho(-scale", "= self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location =", "= self.location.y return m def push(self): self._stack.append({ \"scale\": self.scale, \"rotation\":", "__enter__(self): self.push() return self def __exit__(self, exc_type, exc_val, exc_tb): self.pop()", "= 10. self.rotation_deg = 0. self.location = glm.vec3(0) self._stack =", "__init__(self, rs: \"GameRenderSettings\"): self.rs = rs self.scale = 10. self.rotation_deg", "glm.ortho(-scale * ratio, scale * ratio, -scale, scale, -10, 10)", "return glm.mat3( ca, sa, 0, -sa, ca, 0, 0, 0,", "degree / 180. * math.pi sa = math.sin(a) ca =", "def rotation_matrix_2d(degree: float) -> glm.mat3: a = degree / 180.", "-> glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m *= self.scale * .5", "transformation_matrix_4(self) -> glm.mat4: m = glm.rotate( glm.mat4(1), -self.rotation_deg / 180", ") class GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.projection", "= degree / 180. * math.pi sa = math.sin(a) ca", "s[\"location\"] def __enter__(self): self.push() return self def __exit__(self, exc_type, exc_val,", "rs: \"GameRenderSettings\"): self.rs = rs self.scale = 10. 
self.rotation_deg =", "math from lib.opengl import RenderSettings class GameProjection: def __init__(self, rs:", "ratio, scale * ratio, -scale, scale, -10, 10) return m", "glm.scale(glm.mat4(), glm.vec3(2. / self.scale)) m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x,", "m *= self.scale * .5 m[2][0] = self.location.x m[2][1] =", "*= self.scale * .5 m[2][0] = self.location.x m[2][1] = self.location.y", "-> glm.mat4: scale = 1. ratio = self.rs.render_width / self.rs.render_height", "-sa, ca, 0, 0, 0, 1 ) class GameRenderSettings(RenderSettings): def", "glm import math from lib.opengl import RenderSettings class GameProjection: def", "glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return m def transformation_matrix(self) -> glm.mat3:", "return m def transformation_matrix(self) -> glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m", "1. ratio = self.rs.render_width / self.rs.render_height m = glm.ortho(-scale *", "math.pi sa = math.sin(a) ca = math.cos(a) return glm.mat3( ca,", "self.scale * .5 m[2][0] = self.location.x m[2][1] = self.location.y return", "m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return m", "\"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), }) def pop(self): s", "glm.vec3(2. / self.scale)) m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y,", "self.rs.render_width / self.rs.render_height m = glm.ortho(-scale * ratio, scale *", "0, -sa, ca, 0, 0, 0, 1 ) class GameRenderSettings(RenderSettings):", "= [] def projection_matrix_4(self) -> glm.mat4: scale = 1. ratio", "* math.pi sa = math.sin(a) ca = math.cos(a) return glm.mat3(", "10. self.rotation_deg = 0. self.location = glm.vec3(0) self._stack = []", "projection_matrix_4(self) -> glm.mat4: scale = 1. ratio = self.rs.render_width /", "self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), }) def pop(self):", "exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree: float) -> glm.mat3: a =", "glm.vec3(0, 0, 1) ) m = m * glm.scale(glm.mat4(), glm.vec3(2.", "glm.mat4: scale = 1. ratio = self.rs.render_width / self.rs.render_height m", "GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.projection = GameProjection(self)", "m def push(self): self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(),", "glm.vec3(-self.location.x, -self.location.y, 0)) return m def transformation_matrix(self) -> glm.mat3: m", "glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m *= self.scale * .5 m[2][0]", "self.scale = s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location = s[\"location\"] def", "= glm.vec3(0) self._stack = [] def projection_matrix_4(self) -> glm.mat4: scale", "= math.cos(a) return glm.mat3( ca, sa, 0, -sa, ca, 0,", "def transformation_matrix(self) -> glm.mat3: m = rotation_matrix_2d(self.rotation_deg) m *= self.scale", "s[\"rotation\"] self.location = s[\"location\"] def __enter__(self): self.push() return self def", "glm.mat3: a = degree / 180. * math.pi sa =", "= s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location = s[\"location\"] def __enter__(self):", "self.rotation_deg = 0. self.location = glm.vec3(0) self._stack = [] def", "* ratio, scale * ratio, -scale, scale, -10, 10) return", "rotation_matrix_2d(degree: float) -> glm.mat3: a = degree / 180. 
*", "import math from lib.opengl import RenderSettings class GameProjection: def __init__(self,", "RenderSettings class GameProjection: def __init__(self, rs: \"GameRenderSettings\"): self.rs = rs", "return m def transformation_matrix_4(self) -> glm.mat4: m = glm.rotate( glm.mat4(1),", "s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location = s[\"location\"] def __enter__(self): self.push()", ") m = m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale)) m", "def __enter__(self): self.push() return self def __exit__(self, exc_type, exc_val, exc_tb):", "a = degree / 180. * math.pi sa = math.sin(a)", "1 ) class GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs)", "= rotation_matrix_2d(self.rotation_deg) m *= self.scale * .5 m[2][0] = self.location.x", "0. self.location = glm.vec3(0) self._stack = [] def projection_matrix_4(self) ->", "return self def __exit__(self, exc_type, exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree:", "0, 0, 1 ) class GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs):", "m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return m def transformation_matrix(self)", "/ self.scale)) m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0))", "scale * ratio, -scale, scale, -10, 10) return m def", "ratio = self.rs.render_width / self.rs.render_height m = glm.ortho(-scale * ratio,", "-scale, scale, -10, 10) return m def transformation_matrix_4(self) -> glm.mat4:", "glm.pi(), glm.vec3(0, 0, 1) ) m = m * glm.scale(glm.mat4(),", "self.pop() def rotation_matrix_2d(degree: float) -> glm.mat3: a = degree /", "/ 180 * glm.pi(), glm.vec3(0, 0, 1) ) m =", "glm.mat4: m = glm.rotate( glm.mat4(1), -self.rotation_deg / 180 * glm.pi(),", "m = glm.rotate( glm.mat4(1), -self.rotation_deg / 180 * glm.pi(), glm.vec3(0,", "s = self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location", "def __init__(self, rs: \"GameRenderSettings\"): self.rs = rs self.scale = 10.", "* ratio, -scale, scale, -10, 10) return m def transformation_matrix_4(self)", "10) return m def transformation_matrix_4(self) -> glm.mat4: m = glm.rotate(", "self.scale)) m = m * glm.translate(glm.mat4(), glm.vec3(-self.location.x, -self.location.y, 0)) return", "push(self): self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\": self.location.__copy__(), }) def", "exc_tb): self.pop() def rotation_matrix_2d(degree: float) -> glm.mat3: a = degree", "self.push() return self def __exit__(self, exc_type, exc_val, exc_tb): self.pop() def", "self.location = s[\"location\"] def __enter__(self): self.push() return self def __exit__(self,", "sa = math.sin(a) ca = math.cos(a) return glm.mat3( ca, sa,", "import glm import math from lib.opengl import RenderSettings class GameProjection:", "lib.opengl import RenderSettings class GameProjection: def __init__(self, rs: \"GameRenderSettings\"): self.rs", "sa, 0, -sa, ca, 0, 0, 0, 1 ) class", "* glm.scale(glm.mat4(), glm.vec3(2. 
/ self.scale)) m = m * glm.translate(glm.mat4(),", "ca = math.cos(a) return glm.mat3( ca, sa, 0, -sa, ca,", "self def __exit__(self, exc_type, exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree: float)", "-10, 10) return m def transformation_matrix_4(self) -> glm.mat4: m =", "self.rotation_deg, \"location\": self.location.__copy__(), }) def pop(self): s = self._stack.pop(-1) self.scale", "ca, 0, 0, 0, 1 ) class GameRenderSettings(RenderSettings): def __init__(self,", "= self.rs.render_width / self.rs.render_height m = glm.ortho(-scale * ratio, scale", "import RenderSettings class GameProjection: def __init__(self, rs: \"GameRenderSettings\"): self.rs =", "return m def push(self): self._stack.append({ \"scale\": self.scale, \"rotation\": self.rotation_deg, \"location\":", "def __exit__(self, exc_type, exc_val, exc_tb): self.pop() def rotation_matrix_2d(degree: float) ->", "self._stack.pop(-1) self.scale = s[\"scale\"] self.rotation_deg = s[\"rotation\"] self.location = s[\"location\"]", "class GameRenderSettings(RenderSettings): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.projection =", "GameProjection: def __init__(self, rs: \"GameRenderSettings\"): self.rs = rs self.scale =", "self.rotation_deg = s[\"rotation\"] self.location = s[\"location\"] def __enter__(self): self.push() return", "180. * math.pi sa = math.sin(a) ca = math.cos(a) return", "math.sin(a) ca = math.cos(a) return glm.mat3( ca, sa, 0, -sa,", "= self.location.x m[2][1] = self.location.y return m def push(self): self._stack.append({", "glm.rotate( glm.mat4(1), -self.rotation_deg / 180 * glm.pi(), glm.vec3(0, 0, 1)", "class GameProjection: def __init__(self, rs: \"GameRenderSettings\"): self.rs = rs self.scale", "m * glm.scale(glm.mat4(), glm.vec3(2. / self.scale)) m = m *", "self._stack = [] def projection_matrix_4(self) -> glm.mat4: scale = 1.", "scale, -10, 10) return m def transformation_matrix_4(self) -> glm.mat4: m", "* .5 m[2][0] = self.location.x m[2][1] = self.location.y return m" ]
[ "argparse import json import os import statistics from collections import", "reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an entry will be", "a __main__ as that matches the format of test.__class__ in", "Any from urllib.request import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC =", "bool: # check that their keys are the same if", "format of test.__class__ in # common_utils.py (where this data will", "\"w+\") as file: slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs:", "get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an entry will be like (\"test_doc_examples (__main__.TestTypeHints)\"", "the threshold and the set of test cases have not", "= filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests() if too_similar( slow_test_times,", "we will export the calculated \" \"results. The default threshold", "60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = [\"asan\", \"periodic\"] def get_test_case_times()", "test.__class__ in # common_utils.py (where this data will be used),", "-> Dict[str, float]: reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an", "SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() -> Dict[str, float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\"", "Version2Report, ) from typing import cast, DefaultDict, Dict, List, Any", "[\"asan\", \"periodic\"] def get_test_case_times() -> Dict[str, float]: reports: List[Report] =", "} def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]: return {", "-> None: filename = options.filename if os.path.exists(filename): print(f\"Overwriting existent file:", "results with stats/slow-tests.json in pytorch/test-infra. If the relative differences \"", "test cases have not \" \"changed, we will export the", "any(job_name in str(report[\"build_job\"]) for job_name in IGNORED_JOBS): continue for test_file", "running test would look like. 
name = f\"{casename} (__main__.{suitename})\" succeeded:", "): slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\" \", separators=(\",\",", "{ test_case: time for test_case, time in test_cases_dict.items() if time", "contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents)) def too_similar(", "filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]: return { test_case: time", ") -> bool: # check that their keys are the", "2 only\") v2report = cast(Version2Report, report) if any(job_name in str(report[\"build_job\"])", "List[float]] = defaultdict(list) for report in reports: if report.get(\"format_version\", 1)", "of test.__class__ in # common_utils.py (where this data will be", "parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file", "str(report[\"build_job\"]) for job_name in IGNORED_JOBS): continue for test_file in v2report[\"files\"].values():", "the output # of a running test would look like.", "= urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents)) def too_similar( calculated_times:", "are the same if calculated_times.keys() != other_times.keys(): return False for", "IGNORED_JOBS): continue for test_file in v2report[\"files\"].values(): for suitename, test_suite in", "def too_similar( calculated_times: Dict[str, float], other_times: Dict[str, float], threshold: float", "of test cases have not \" \"changed, we will export", "slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests()", "each test are smaller than the threshold and the set", "for casename, test_case in test_suite[\"cases\"].items(): # The below attaches a", "handled is version 2 only\") v2report = cast(Version2Report, report) if", "return parser.parse_args() def main() -> None: options = parse_args() export_slow_tests(options)", "in reports: if report.get(\"format_version\", 1) != 2: # type: ignore[misc]", "be like (\"test_doc_examples (__main__.TestTypeHints)\" -> [values])) test_names_to_times: DefaultDict[str, List[float]] =", "a running test would look like. name = f\"{casename} (__main__.{suitename})\"", "test_case, test_time in calculated_times.items(): other_test_time = other_times[test_case] relative_difference = abs(", "in test_suite[\"cases\"].items(): # The below attaches a __main__ as that", "format currently handled is version 2 only\") v2report = cast(Version2Report,", "is 10%.\", ) return parser.parse_args() def main() -> None: options", "are smaller than the threshold and the set of test", "get_test_infra_slow_tests() -> Dict[str, float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url,", "from collections import defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch, Report,", "than the threshold and the set of test cases have", "float]: reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an entry will", "calculated_times.keys() != other_times.keys(): return False for test_case, test_time in calculated_times.items():", "from previous S3 stats. 
Default file path: .pytorch-slow-tests.json\", ) parser.add_argument(", "RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = [\"asan\", \"periodic\"] def get_test_case_times() ->", "False return True def export_slow_tests(options: Any) -> None: filename =", "for suitename, test_suite in test_file[\"suites\"].items(): for casename, test_case in test_suite[\"cases\"].items():", "Dict[str, float]) -> Dict[str, float]: return { test_case: time for", "} def get_test_infra_slow_tests() -> Dict[str, float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents", "S3 stats. Default file path: .pytorch-slow-tests.json\", ) parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\",", "nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file path to dump", "differences \" \"between test times for each test are smaller", "dump slow test times from previous S3 stats. Default file", "relative_difference > threshold: return False return True def export_slow_tests(options: Any)", "test times from previous S3 stats. Default file path: .pytorch-slow-tests.json\",", "if report.get(\"format_version\", 1) != 2: # type: ignore[misc] raise RuntimeError(\"S3", "argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export a JSON of slow test", "help=\"Compares generated results with stats/slow-tests.json in pytorch/test-infra. If the relative", "\"periodic\"] def get_test_case_times() -> Dict[str, float]: reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\",", "cast(Dict[str, float], json.loads(contents)) def too_similar( calculated_times: Dict[str, float], other_times: Dict[str,", "is version 2 only\") v2report = cast(Version2Report, report) if any(job_name", "os.path.exists(filename): print(f\"Overwriting existent file: {filename}\") with open(filename, \"w+\") as file:", "argparse.ArgumentParser( description=\"Export a JSON of slow test cases in PyTorch", "for job_name in IGNORED_JOBS): continue for test_file in v2report[\"files\"].values(): for", ") if relative_difference > threshold: return False return True def", ".pytorch-slow-tests.json\", ) parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\", type=float, const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results", ">= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() -> Dict[str, float]: url =", "options.filename if os.path.exists(filename): print(f\"Overwriting existent file: {filename}\") with open(filename, \"w+\")", "def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export a JSON", "will be like (\"test_doc_examples (__main__.TestTypeHints)\" -> [values])) test_names_to_times: DefaultDict[str, List[float]]", "Dict[str, float] = filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests() if", "threshold: float ) -> bool: # check that their keys", "def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]: return { test_case:", "= test_case[\"status\"] is None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case:", "test_file in v2report[\"files\"].values(): for suitename, test_suite in test_file[\"suites\"].items(): for casename,", "with stats/slow-tests.json in pytorch/test-infra. 
If the relative differences \" \"between", ") file.write(\"\\n\") def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export", "be used), and also matches what the output # of", "If the relative differences \" \"between test times for each", "from typing import cast, DefaultDict, Dict, List, Any from urllib.request", "# The below attaches a __main__ as that matches the", "url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float],", "report) if any(job_name in str(report[\"build_job\"]) for job_name in IGNORED_JOBS): continue", "test cases in PyTorch unit test suite\" ) parser.add_argument( \"-f\",", "- test_time) / max(other_test_time, test_time) ) if relative_difference > threshold:", "json.loads(contents)) def too_similar( calculated_times: Dict[str, float], other_times: Dict[str, float], threshold:", "their keys are the same if calculated_times.keys() != other_times.keys(): return", "we will export the stats already in stats/slow-tests.json. Else, we", "print(f\"Overwriting existent file: {filename}\") with open(filename, \"w+\") as file: slow_test_times:", "urllib.request import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD", "def export_slow_tests(options: Any) -> None: filename = options.filename if os.path.exists(filename):", "\"between test times for each test are smaller than the", "name = f\"{casename} (__main__.{suitename})\" succeeded: bool = test_case[\"status\"] is None", "json.dump( slow_test_times, file, indent=\" \", separators=(\",\", \": \"), sort_keys=True )", "\"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file path", "test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\" \", separators=(\",\", \": \"), sort_keys=True", "filename = options.filename if os.path.exists(filename): print(f\"Overwriting existent file: {filename}\") with", "= test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\" \", separators=(\",\", \": \"),", "DefaultDict[str, List[float]] = defaultdict(list) for report in reports: if report.get(\"format_version\",", "2: # type: ignore[misc] raise RuntimeError(\"S3 format currently handled is", "= get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an entry will be like (\"test_doc_examples", "cast(Version2Report, report) if any(job_name in str(report[\"build_job\"]) for job_name in IGNORED_JOBS):", "Dict[str, float]: return { test_case: time for test_case, time in", "# an entry will be like (\"test_doc_examples (__main__.TestTypeHints)\" -> [values]))", "= get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times =", "the calculated \" \"results. 
The default threshold is 10%.\", )", "10%.\", ) return parser.parse_args() def main() -> None: options =", "import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD =", "same if calculated_times.keys() != other_times.keys(): return False for test_case, test_time", "time >= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() -> Dict[str, float]: url", "DefaultDict, Dict, List, Any from urllib.request import urlopen SLOW_TESTS_FILE =", "IGNORED_JOBS = [\"asan\", \"periodic\"] def get_test_case_times() -> Dict[str, float]: reports:", "parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export a JSON of", "options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ):", "help=\"Specify a file path to dump slow test times from", "report in reports: if report.get(\"format_version\", 1) != 2: # type:", "test_file[\"suites\"].items(): for casename, test_case in test_suite[\"cases\"].items(): # The below attaches", "\" \"between test times for each test are smaller than", "!= other_times.keys(): return False for test_case, test_time in calculated_times.items(): other_test_time", "f\"{casename} (__main__.{suitename})\" succeeded: bool = test_case[\"status\"] is None if succeeded:", "# type: ignore[misc] raise RuntimeError(\"S3 format currently handled is version", "version 2 only\") v2report = cast(Version2Report, report) if any(job_name in", "generated results with stats/slow-tests.json in pytorch/test-infra. If the relative differences", "float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str,", "in stats/slow-tests.json. Else, we will export the calculated \" \"results.", "get_test_case_times() -> Dict[str, float]: reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\") #", "\", separators=(\",\", \": \"), sort_keys=True ) file.write(\"\\n\") def parse_args() ->", "test times for each test are smaller than the threshold", "have not \" \"changed, we will export the stats already", "if calculated_times.keys() != other_times.keys(): return False for test_case, test_time in", "export_slow_tests(options: Any) -> None: filename = options.filename if os.path.exists(filename): print(f\"Overwriting", "type: ignore[misc] raise RuntimeError(\"S3 format currently handled is version 2", "List, Any from urllib.request import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC", "will export the calculated \" \"results. 
The default threshold is", "return True def export_slow_tests(options: Any) -> None: filename = options.filename", "for each test are smaller than the threshold and the", "in IGNORED_JOBS): continue for test_file in v2report[\"files\"].values(): for suitename, test_suite", "other_times[test_case] relative_difference = abs( (other_test_time - test_time) / max(other_test_time, test_time)", "check that their keys are the same if calculated_times.keys() !=", "test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\"", "float]) -> Dict[str, float]: return { test_case: time for test_case,", "max(other_test_time, test_time) ) if relative_difference > threshold: return False return", "in test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() ->", "test_infra_slow_tests_dict = get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times", "if os.path.exists(filename): print(f\"Overwriting existent file: {filename}\") with open(filename, \"w+\") as", "/ max(other_test_time, test_time) ) if relative_difference > threshold: return False", "cases in PyTorch unit test suite\" ) parser.add_argument( \"-f\", \"--filename\",", "suite\" ) parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify", "float ) -> bool: # check that their keys are", "= f\"{casename} (__main__.{suitename})\" succeeded: bool = test_case[\"status\"] is None if", "float], other_times: Dict[str, float], threshold: float ) -> bool: #", "report.get(\"format_version\", 1) != 2: # type: ignore[misc] raise RuntimeError(\"S3 format", "{ test_case: statistics.mean(times) for test_case, times in test_names_to_times.items() } def", "test_case in test_suite[\"cases\"].items(): # The below attaches a __main__ as", "test_case: statistics.mean(times) for test_case, times in test_names_to_times.items() } def filter_slow_tests(test_cases_dict:", "for test_case, times in test_names_to_times.items() } def filter_slow_tests(test_cases_dict: Dict[str, float])", "Dict[str, float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return", "below attaches a __main__ as that matches the format of", "for test_case, time in test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC }", "the same if calculated_times.keys() != other_times.keys(): return False for test_case,", "(where this data will be used), and also matches what", "options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\" \",", ") parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a", "in test_names_to_times.items() } def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]:", "-> Dict[str, float]: url = \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\")", "timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents)) def too_similar( calculated_times: Dict[str, float],", "test_time in calculated_times.items(): other_test_time = other_times[test_case] relative_difference = abs( (other_test_time", "None: filename = 
options.filename if os.path.exists(filename): print(f\"Overwriting existent file: {filename}\")", "keys are the same if calculated_times.keys() != other_times.keys(): return False", "too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times,", "test_case: time for test_case, time in test_cases_dict.items() if time >=", "in calculated_times.items(): other_test_time = other_times[test_case] relative_difference = abs( (other_test_time -", "False for test_case, test_time in calculated_times.items(): other_test_time = other_times[test_case] relative_difference", "-> argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export a JSON of slow", "times in test_names_to_times.items() } def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str,", "filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict,", "if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case: statistics.mean(times) for test_case, times", "= abs( (other_test_time - test_time) / max(other_test_time, test_time) ) if", "test_case, times in test_names_to_times.items() } def filter_slow_tests(test_cases_dict: Dict[str, float]) ->", "from urllib.request import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0", "return False for test_case, test_time in calculated_times.items(): other_test_time = other_times[test_case]", "entry will be like (\"test_doc_examples (__main__.TestTypeHints)\" -> [values])) test_names_to_times: DefaultDict[str,", "test_time) ) if relative_difference > threshold: return False return True", "test_names_to_times: DefaultDict[str, List[float]] = defaultdict(list) for report in reports: if", "export the stats already in stats/slow-tests.json. Else, we will export", "#!/usr/bin/env python3 import argparse import json import os import statistics", "defaultdict(list) for report in reports: if report.get(\"format_version\", 1) != 2:", "suitename, test_suite in test_file[\"suites\"].items(): for casename, test_case in test_suite[\"cases\"].items(): #", "stats/slow-tests.json. Else, we will export the calculated \" \"results. The", "defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch, Report, Version2Report, ) from", "cases have not \" \"changed, we will export the stats", "also matches what the output # of a running test", "time for test_case, time in test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC", "export the calculated \" \"results. The default threshold is 10%.\",", "set of test cases have not \" \"changed, we will", "import json import os import statistics from collections import defaultdict", "if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict json.dump(", "parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\", type=float, const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results with stats/slow-tests.json", "times from previous S3 stats. 
Default file path: .pytorch-slow-tests.json\", )", "import ( get_previous_reports_for_branch, Report, Version2Report, ) from typing import cast,", "Dict[str, float], threshold: float ) -> bool: # check that", "> threshold: return False return True def export_slow_tests(options: Any) ->", "True def export_slow_tests(options: Any) -> None: filename = options.filename if", "if time >= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() -> Dict[str, float]:", "output # of a running test would look like. name", "float]: return { test_case: time for test_case, time in test_cases_dict.items()", "return { test_case: statistics.mean(times) for test_case, times in test_names_to_times.items() }", "and the set of test cases have not \" \"changed,", "urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1", "if relative_difference > threshold: return False return True def export_slow_tests(options:", "JSON of slow test cases in PyTorch unit test suite\"", ") parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\", type=float, const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results with", "v2report = cast(Version2Report, report) if any(job_name in str(report[\"build_job\"]) for job_name", "statistics from collections import defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch,", "def main() -> None: options = parse_args() export_slow_tests(options) if __name__", "file path to dump slow test times from previous S3", "import defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch, Report, Version2Report, )", "-> bool: # check that their keys are the same", "json import os import statistics from collections import defaultdict from", "<gh_stars>1-10 #!/usr/bin/env python3 import argparse import json import os import", "stats. Default file path: .pytorch-slow-tests.json\", ) parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\", type=float,", "test_suite in test_file[\"suites\"].items(): for casename, test_case in test_suite[\"cases\"].items(): # The", "statistics.mean(times) for test_case, times in test_names_to_times.items() } def filter_slow_tests(test_cases_dict: Dict[str,", "slow test times from previous S3 stats. Default file path:", "relative_difference = abs( (other_test_time - test_time) / max(other_test_time, test_time) )", "PyTorch unit test suite\" ) parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str,", "v2report[\"files\"].values(): for suitename, test_suite in test_file[\"suites\"].items(): for casename, test_case in", "the relative differences \" \"between test times for each test", "Dict, List, Any from urllib.request import urlopen SLOW_TESTS_FILE = \".pytorch-slow-tests.json\"", "typing import cast, DefaultDict, Dict, List, Any from urllib.request import", "test_time) / max(other_test_time, test_time) ) if relative_difference > threshold: return", "file: slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict =", "slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times, file, indent=\" \", separators=(\",\", \":", "will export the stats already in stats/slow-tests.json. Else, we will", "const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results with stats/slow-tests.json in pytorch/test-infra. 
If the", "matches the format of test.__class__ in # common_utils.py (where this", "get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict", "# check that their keys are the same if calculated_times.keys()", "{filename}\") with open(filename, \"w+\") as file: slow_test_times: Dict[str, float] =", "test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case: statistics.mean(times) for test_case, times in test_names_to_times.items()", "separators=(\",\", \": \"), sort_keys=True ) file.write(\"\\n\") def parse_args() -> argparse.Namespace:", "attaches a __main__ as that matches the format of test.__class__", "float], json.loads(contents)) def too_similar( calculated_times: Dict[str, float], other_times: Dict[str, float],", "calculated_times: Dict[str, float], other_times: Dict[str, float], threshold: float ) ->", "of a running test would look like. name = f\"{casename}", "sort_keys=True ) file.write(\"\\n\") def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser(", "path to dump slow test times from previous S3 stats.", "(__main__.{suitename})\" succeeded: bool = test_case[\"status\"] is None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"])", "nargs=\"?\", type=float, const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results with stats/slow-tests.json in pytorch/test-infra.", "test are smaller than the threshold and the set of", "other_times.keys(): return False for test_case, test_time in calculated_times.items(): other_test_time =", "default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file path to dump slow test", "RuntimeError(\"S3 format currently handled is version 2 only\") v2report =", "= 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = [\"asan\", \"periodic\"] def", "file.write(\"\\n\") def parse_args() -> argparse.Namespace: parser = argparse.ArgumentParser( description=\"Export a", "already in stats/slow-tests.json. Else, we will export the calculated \"", "import os import statistics from collections import defaultdict from tools.stats.s3_stat_parser", "collections import defaultdict from tools.stats.s3_stat_parser import ( get_previous_reports_for_branch, Report, Version2Report,", "get_previous_reports_for_branch, Report, Version2Report, ) from typing import cast, DefaultDict, Dict,", "description=\"Export a JSON of slow test cases in PyTorch unit", "currently handled is version 2 only\") v2report = cast(Version2Report, report)", "time in test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests()", "Else, we will export the calculated \" \"results. The default", "= argparse.ArgumentParser( description=\"Export a JSON of slow test cases in", "not \" \"changed, we will export the stats already in", "parser.parse_args() def main() -> None: options = parse_args() export_slow_tests(options) if", "\" \"results. The default threshold is 10%.\", ) return parser.parse_args()", "look like. 
name = f\"{casename} (__main__.{suitename})\" succeeded: bool = test_case[\"status\"]", "slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs ): slow_test_times = test_infra_slow_tests_dict json.dump( slow_test_times, file,", "as file: slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times()) if options.ignore_small_diffs: test_infra_slow_tests_dict", "of slow test cases in PyTorch unit test suite\" )", "that their keys are the same if calculated_times.keys() != other_times.keys():", "SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = [\"asan\", \"periodic\"]", "# common_utils.py (where this data will be used), and also", "type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file path to dump slow", "test suite\" ) parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE,", "List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\") # an entry will be like", "const=SLOW_TESTS_FILE, help=\"Specify a file path to dump slow test times", "continue for test_file in v2report[\"files\"].values(): for suitename, test_suite in test_file[\"suites\"].items():", "would look like. name = f\"{casename} (__main__.{suitename})\" succeeded: bool =", "times for each test are smaller than the threshold and", "return False return True def export_slow_tests(options: Any) -> None: filename", "previous S3 stats. Default file path: .pytorch-slow-tests.json\", ) parser.add_argument( \"--ignore-small-diffs\",", "in PyTorch unit test suite\" ) parser.add_argument( \"-f\", \"--filename\", nargs=\"?\",", "test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC } def get_test_infra_slow_tests() -> Dict[str,", "\"), sort_keys=True ) file.write(\"\\n\") def parse_args() -> argparse.Namespace: parser =", "\"results. The default threshold is 10%.\", ) return parser.parse_args() def", "like. 
name = f\"{casename} (__main__.{suitename})\" succeeded: bool = test_case[\"status\"] is", "in test_file[\"suites\"].items(): for casename, test_case in test_suite[\"cases\"].items(): # The below", "= cast(Version2Report, report) if any(job_name in str(report[\"build_job\"]) for job_name in", "with open(filename, \"w+\") as file: slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times())", "is None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case: statistics.mean(times) for", "-> [values])) test_names_to_times: DefaultDict[str, List[float]] = defaultdict(list) for report in", "\".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS = [\"asan\",", "import argparse import json import os import statistics from collections", "casename, test_case in test_suite[\"cases\"].items(): # The below attaches a __main__", "threshold and the set of test cases have not \"", "\"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE, const=SLOW_TESTS_FILE, help=\"Specify a file path to", "-> Dict[str, float]: return { test_case: time for test_case, time", "None: options = parse_args() export_slow_tests(options) if __name__ == \"__main__\": main()", "SLOW_TESTS_FILE = \".pytorch-slow-tests.json\" SLOW_TEST_CASE_THRESHOLD_SEC = 60.0 RELATIVE_DIFFERENCE_THRESHOLD = 0.1 IGNORED_JOBS", "= other_times[test_case] relative_difference = abs( (other_test_time - test_time) / max(other_test_time,", "test_case, time in test_cases_dict.items() if time >= SLOW_TEST_CASE_THRESHOLD_SEC } def", "what the output # of a running test would look", "# of a running test would look like. name =", "common_utils.py (where this data will be used), and also matches", "will be used), and also matches what the output #", "slow test cases in PyTorch unit test suite\" ) parser.add_argument(", "other_test_time = other_times[test_case] relative_difference = abs( (other_test_time - test_time) /", "stats already in stats/slow-tests.json. Else, we will export the calculated", "in # common_utils.py (where this data will be used), and", "def get_test_case_times() -> Dict[str, float]: reports: List[Report] = get_previous_reports_for_branch(\"origin/viable/strict\", \"\")", "(__main__.TestTypeHints)\" -> [values])) test_names_to_times: DefaultDict[str, List[float]] = defaultdict(list) for report", "matches what the output # of a running test would", "= \"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents))", "file: {filename}\") with open(filename, \"w+\") as file: slow_test_times: Dict[str, float]", "too_similar( calculated_times: Dict[str, float], other_times: Dict[str, float], threshold: float )", "raise RuntimeError(\"S3 format currently handled is version 2 only\") v2report", "Dict[str, float], other_times: Dict[str, float], threshold: float ) -> bool:", "type=float, const=RELATIVE_DIFFERENCE_THRESHOLD, help=\"Compares generated results with stats/slow-tests.json in pytorch/test-infra. 
If", "data will be used), and also matches what the output", "for test_file in v2report[\"files\"].values(): for suitename, test_suite in test_file[\"suites\"].items(): for", "Default file path: .pytorch-slow-tests.json\", ) parser.add_argument( \"--ignore-small-diffs\", nargs=\"?\", type=float, const=RELATIVE_DIFFERENCE_THRESHOLD,", "(other_test_time - test_time) / max(other_test_time, test_time) ) if relative_difference >", "threshold is 10%.\", ) return parser.parse_args() def main() -> None:", "a JSON of slow test cases in PyTorch unit test", "only\") v2report = cast(Version2Report, report) if any(job_name in str(report[\"build_job\"]) for", "test would look like. name = f\"{casename} (__main__.{suitename})\" succeeded: bool", "The below attaches a __main__ as that matches the format", "return { test_case: time for test_case, time in test_cases_dict.items() if", "test_suite[\"cases\"].items(): # The below attaches a __main__ as that matches", "succeeded: bool = test_case[\"status\"] is None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return", "unit test suite\" ) parser.add_argument( \"-f\", \"--filename\", nargs=\"?\", type=str, default=SLOW_TESTS_FILE,", "if options.ignore_small_diffs: test_infra_slow_tests_dict = get_test_infra_slow_tests() if too_similar( slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs", "the set of test cases have not \" \"changed, we", "\"https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json\" contents = urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents)) def", "import cast, DefaultDict, Dict, List, Any from urllib.request import urlopen", "in str(report[\"build_job\"]) for job_name in IGNORED_JOBS): continue for test_file in", "None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case: statistics.mean(times) for test_case,", "reports: if report.get(\"format_version\", 1) != 2: # type: ignore[misc] raise", "used), and also matches what the output # of a", "= defaultdict(list) for report in reports: if report.get(\"format_version\", 1) !=", "__main__ as that matches the format of test.__class__ in #", "ignore[misc] raise RuntimeError(\"S3 format currently handled is version 2 only\")", "in v2report[\"files\"].values(): for suitename, test_suite in test_file[\"suites\"].items(): for casename, test_case", "urlopen(url, timeout=1).read().decode(\"utf-8\") return cast(Dict[str, float], json.loads(contents)) def too_similar( calculated_times: Dict[str,", "for report in reports: if report.get(\"format_version\", 1) != 2: #", "threshold: return False return True def export_slow_tests(options: Any) -> None:", "test_case[\"status\"] is None if succeeded: test_names_to_times[name].append(test_case[\"seconds\"]) return { test_case: statistics.mean(times)", "= 0.1 IGNORED_JOBS = [\"asan\", \"periodic\"] def get_test_case_times() -> Dict[str,", "in pytorch/test-infra. If the relative differences \" \"between test times", "the stats already in stats/slow-tests.json. 
#!/usr/bin/env python3

import argparse
import json
import os
import statistics
from collections import defaultdict
from tools.stats.s3_stat_parser import (
    get_previous_reports_for_branch,
    Report,
    Version2Report,
)
from typing import cast, DefaultDict, Dict, List, Any
from urllib.request import urlopen

SLOW_TESTS_FILE = ".pytorch-slow-tests.json"
SLOW_TEST_CASE_THRESHOLD_SEC = 60.0
RELATIVE_DIFFERENCE_THRESHOLD = 0.1
IGNORED_JOBS = ["asan", "periodic"]


def get_test_case_times() -> Dict[str, float]:
    reports: List[Report] = get_previous_reports_for_branch("origin/viable/strict", "")
    # an entry will be like ("test_doc_examples (__main__.TestTypeHints)" -> [values])
    test_names_to_times: DefaultDict[str, List[float]] = defaultdict(list)
    for report in reports:
        if report.get("format_version", 1) != 2:  # type: ignore[misc]
            raise RuntimeError("S3 format currently handled is version 2 only")
        v2report = cast(Version2Report, report)

        if any(job_name in str(report["build_job"]) for job_name in IGNORED_JOBS):
            continue

        for test_file in v2report["files"].values():
            for suitename, test_suite in test_file["suites"].items():
                for casename, test_case in test_suite["cases"].items():
                    # The below attaches a __main__ prefix to the suite name as
                    # that matches the format of test.__class__ in
                    # common_utils.py (where this data will be used), and also
                    # matches what the output of a running test looks like.
                    name = f"{casename} (__main__.{suitename})"
                    succeeded: bool = test_case["status"] is None
                    if succeeded:
                        test_names_to_times[name].append(test_case["seconds"])

    return {
        test_case: statistics.mean(times)
        for test_case, times in test_names_to_times.items()
    }


def filter_slow_tests(test_cases_dict: Dict[str, float]) -> Dict[str, float]:
    return {
        test_case: time
        for test_case, time in test_cases_dict.items()
        if time >= SLOW_TEST_CASE_THRESHOLD_SEC
    }


def get_test_infra_slow_tests() -> Dict[str, float]:
    url = "https://raw.githubusercontent.com/pytorch/test-infra/generated-stats/stats/slow-tests.json"
    contents = urlopen(url, timeout=1).read().decode("utf-8")
    return cast(Dict[str, float], json.loads(contents))


def too_similar(
    calculated_times: Dict[str, float], other_times: Dict[str, float], threshold: float
) -> bool:
    # check that their keys are the same
    if calculated_times.keys() != other_times.keys():
        return False

    for test_case, test_time in calculated_times.items():
        other_test_time = other_times[test_case]
        relative_difference = abs(
            (other_test_time - test_time) / max(other_test_time, test_time)
        )
        if relative_difference > threshold:
            return False
    return True


def export_slow_tests(options: Any) -> None:
    filename = options.filename
    if os.path.exists(filename):
        print(f"Overwriting existing file: {filename}")
    with open(filename, "w+") as file:
        slow_test_times: Dict[str, float] = filter_slow_tests(get_test_case_times())
        if options.ignore_small_diffs:
            test_infra_slow_tests_dict = get_test_infra_slow_tests()
            if too_similar(
                slow_test_times, test_infra_slow_tests_dict, options.ignore_small_diffs
            ):
                slow_test_times = test_infra_slow_tests_dict
        json.dump(
            slow_test_times, file, indent="    ", separators=(",", ": "), sort_keys=True
        )
        file.write("\n")


def parse_args() -> argparse.Namespace:
    parser = argparse.ArgumentParser(
        description="Export a JSON of slow test cases in PyTorch unit test suite"
    )
    parser.add_argument(
        "-f",
        "--filename",
        nargs="?",
        type=str,
        default=SLOW_TESTS_FILE,
        const=SLOW_TESTS_FILE,
        help="Specify a file path to dump slow test times from previous S3 stats. "
        "Default file path: .pytorch-slow-tests.json",
    )
    parser.add_argument(
        "--ignore-small-diffs",
        nargs="?",
        type=float,
        const=RELATIVE_DIFFERENCE_THRESHOLD,
        help="Compares generated results with stats/slow-tests.json in "
        "pytorch/test-infra. If the relative differences between test times for "
        "each test are smaller than the threshold and the set of test cases has "
        "not changed, we will export the stats already in stats/slow-tests.json. "
        "Else, we will export the calculated results. The default threshold is 10%%.",
    )
    return parser.parse_args()


def main() -> None:
    options = parse_args()
    export_slow_tests(options)


if __name__ == "__main__":
    main()
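
# Editor's illustration (hypothetical test names and timings; the module path
# is assumed from the tools.stats import above). The exported file's shape
# follows the json.dump call: a flat, key-sorted mapping from
# "case (__main__.Suite)" to mean seconds, keeping only cases at or above
# SLOW_TEST_CASE_THRESHOLD_SEC:
#
#     $ python -m tools.stats.export_slow_tests --ignore-small-diffs 0.1
#     $ cat .pytorch-slow-tests.json
#     {
#         "test_doc_examples (__main__.TestTypeHints)": 88.1,
#         "test_many_tensors (__main__.TestFoo)": 142.7
#     }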
[ "weighted_discounts = np.multiply(discounts, importance_weights) weighted_discounts_one_earlier = np.multiply( discounts, importance_weights_one_earlier )", "1) return x_equal_length action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories", "= int( WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT * num_subsets ) for _ in range(", "to_equal_length(reward_trajectories, 0) logged_propensity_trajectories = to_equal_length( logged_propensity_trajectories, 1 ) target_propensity_trajectories =", "low_bound)[0] j_step_bias[where_lower] = low_bound - j_step_returns[where_lower] where_higher = np.where(j_step_returns >", "1: weighted_doubly_robust = j_step_returns[0] weighted_doubly_robust_std_error = 0.0 else: # break", "in range(num_subsets): trajectory_subset = np.arange( int(i * interval), int((i +", "base=self.gamma ) j_step_return_trajectories = [] for j_step in j_steps: j_step_return_trajectories.append(", "def confidence_bounds(x, confidence): n = len(x) m, se = np.mean(x),", "low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) # decompose error", "+ 1 == mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x +", ") num_trajectories = actions.shape[0] trajectory_length = actions.shape[1] j_steps = [float(\"inf\")]", "j_step_returns=j_step_returns[random_idxs], infinite_step_returns=infinite_step_returns, j_step_return_trajectories=j_step_return_trajectories[random_idxs], ) bootstrapped_means.append(wdr_estimate) weighted_doubly_robust_std_error = np.std(bootstrapped_means) episode_values =", "zeros(ones) at the end. \"\"\" num_actions = len(target_propensities[0]) terminals =", "importance_weights else: importance_weights /= importance_weights.shape[0] return importance_weights @staticmethod def calculate_step_return(", "CpeEstimate: # For details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8", "normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust / denominator,", ") else: direct_method_value = np.zeros([num_trajectories]) control_variate = np.sum( np.multiply( weighted_discounts[:,", "/ num_trajectories ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] )", "weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate( j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ) #", ": j_step + 1], ), axis=1, ) j_step_return = (", "float(\"inf\"), ) ) infinite_step_returns.append(infinite_step_return) # Compute weighted_doubly_robust mean point estimate", "in range(x.shape[0])], ) x = np.array(res.x) return float(np.dot(x, j_step_returns)) @staticmethod", "= np.sum(np.multiply(rewards, discounts), axis=1) denominator = np.nanmean(episode_values) if abs(denominator) <", "1], rewards[:, : j_step + 1]), axis=1, ) if j_step", "scipy as sp import torch from ml.rl.evaluation.cpe import CpeEstimate from", "np.array(res.x) return float(np.dot(x, j_step_returns)) @staticmethod def transform_to_equal_length_trajectories( mdp_ids, actions, rewards,", "= 0 episode_ends = np.nonzero(terminals)[0] if len(terminals) - 1 not", "target_propensity_for_logged_action[trajectory_subset] / logged_propensities[trajectory_subset] ) importance_weights = np.cumprod(importance_weights, axis=1) importance_weights =", "estimate( self, edp: 
EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights, ) -> CpeEstimate: #", "(num_j_steps - 1) j_steps.extend([i * interval for i in range(1,", "0]: terminals[x] = 1 trajectories = [] episode_start = 0", "return importance_weights @staticmethod def calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values,", "np.multiply(estimated_q_values, actions), axis=2 ) estimated_state_values = np.sum( np.multiply(target_propensities, estimated_q_values), axis=2", ") importance_weights_one_earlier = ( np.ones([len(trajectory_subset), 1]) * 1.0 / len(trajectory_subset)", "estimated_q_values[:, : j_step + 1] ) - np.multiply( weighted_discounts_one_earlier[:, :", "len(x) m, se = np.mean(x), sp.stats.sem(x) h = se *", "WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES, ) ) interval = num_trajectories / num_subsets for i", "random_idxs], num_j_steps=sample_size, j_step_returns=j_step_returns[random_idxs], infinite_step_returns=infinite_step_returns, j_step_return_trajectories=j_step_return_trajectories[random_idxs], ) bootstrapped_means.append(wdr_estimate) weighted_doubly_robust_std_error = np.std(bootstrapped_means)", "terminals = np.zeros(mdp_ids.shape[0]) for x in range(0, mdp_ids.shape[0]): if x", "j_step_returns)) @staticmethod def transform_to_equal_length_trajectories( mdp_ids, actions, rewards, logged_propensities, target_propensities, estimated_q_values,", ") Q_value_trajectories = to_equal_length( Q_value_trajectories, np.zeros([num_actions]) ) return ( action_trajectories,", "def normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ): if whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights,", "NUM_SUBSETS_FOR_CB_ESTIMATES = 25 CONFIDENCE_INTERVAL = 0.9 NUM_BOOTSTRAP_SAMPLES = 50 BOOTSTRAP_SAMPLE_PCT", "= 25 CONFIDENCE_INTERVAL = 0.9 NUM_BOOTSTRAP_SAMPLES = 50 BOOTSTRAP_SAMPLE_PCT =", ") importance_weights = np.cumprod(importance_weights, axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights", "= np.mean(x), sp.stats.sem(x) h = se * sp.stats.t._ppf((1 + confidence)", "np.multiply( weighted_discounts[:, : j_step + 1], estimated_q_values[:, : j_step +", "gamma def estimate( self, edp: EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights, ) ->", "= to_equal_length( target_propensity_trajectories, np.zeros([num_actions]) ) Q_value_trajectories = to_equal_length( Q_value_trajectories, np.zeros([num_actions])", ") discounts = np.logspace( start=0, stop=trajectory_length - 1, num=trajectory_length, base=self.gamma", "< trajectory_length - 1: direct_method_value = ( weighted_discounts_one_earlier[:, j_step +", "np.sum( np.multiply( weighted_discounts[:, : j_step + 1], estimated_q_values[:, : j_step", "importance_sampled_cumulative_reward + direct_method_value - control_variate ) return j_step_return @staticmethod def", "num_actions = len(target_propensities[0]) terminals = np.zeros(mdp_ids.shape[0]) for x in range(0,", "\"eq\", \"fun\": lambda x: np.sum(x) - 1.0} x = np.zeros([len(j_steps)])", "range(num_subsets): trajectory_subset = np.arange( int(i * interval), int((i + 1)", "range( WeightedSequentialDoublyRobustEstimator.NUM_BOOTSTRAP_SAMPLES ): random_idxs = np.random.choice(num_j_steps, sample_size, replace=False) random_idxs.sort() wdr_estimate", "= [] for trajectory in trajectories: 
action_trajectories.append(actions[trajectory]) reward_trajectories.append(rewards[trajectory]) logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory])", "importance_weights[:, :-1]] ) infinite_step_return = np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset], discounts, importance_weights,", "np.sum(importance_weights, axis=0) where_zeros = np.where(sum_importance_weights == 0.0)[0] sum_importance_weights[where_zeros] = len(importance_weights)", "1, 0]: terminals[x] = 1 trajectories = [] episode_start =", "* sp.stats.t._ppf((1 + confidence) / 2.0, n - 1) return", "): random_idxs = np.random.choice(num_j_steps, sample_size, replace=False) random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate(", "denominator, ) def compute_weighted_doubly_robust_point_estimate( self, j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories,", "< 1e-6: return CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 ) return", "denominator = np.nanmean(episode_values) if abs(denominator) < 1e-6: return CpeEstimate( raw=0.0,", "if abs(denominator) < 1e-6: return CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0", "def __init__(self, gamma): self.gamma = gamma def estimate( self, edp:", "actions), axis=2 ) estimated_q_values_for_logged_action = np.sum( np.multiply(estimated_q_values, actions), axis=2 )", "random_idxs = np.random.choice(num_j_steps, sample_size, replace=False) random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i]", "= [float(\"inf\")] if num_j_steps > 1: j_steps.append(-1) if num_j_steps >", "self, edp: EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights, ) -> CpeEstimate: # For", "num_j_steps > 2: interval = trajectory_length // (num_j_steps - 1)", ") # Use bootstrapping to compute weighted_doubly_robust standard error bootstrapped_means", "{\"type\": \"eq\", \"fun\": lambda x: np.sum(x) - 1.0} x =", "= 50 BOOTSTRAP_SAMPLE_PCT = 0.5 def __init__(self, gamma): self.gamma =", "importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([num_trajectories, 1]) * 1.0", "importance_weights) weighted_discounts_one_earlier = np.multiply( discounts, importance_weights_one_earlier ) importance_sampled_cumulative_reward = np.sum(", "1]), axis=1, ) if j_step < trajectory_length - 1: direct_method_value", "np.logspace( start=0, stop=trajectory_length - 1, num=trajectory_length, base=self.gamma ) j_step_return_trajectories =", "fillvalue=fill_value)) ).swapaxes(0, 1) return x_equal_length action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions])", "= np.std(bootstrapped_means) episode_values = np.sum(np.multiply(rewards, discounts), axis=1) denominator = np.nanmean(episode_values)", "* 1.0 / len(trajectory_subset) ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:,", "np.zeros([num_actions]) ) return ( action_trajectories, reward_trajectories, logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories, )", "7, 8 ( actions, rewards, logged_propensities, target_propensities, estimated_q_values, ) =", "etc.) 
and output lists of equal-length trajectories (episodes) accoriding to", "low_bound - j_step_returns[where_lower] where_higher = np.where(j_step_returns > high_bound)[0] j_step_bias[where_higher] =", "importance_weights.shape[0] return importance_weights @staticmethod def calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier,", "whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([len(trajectory_subset), 1]) * 1.0 /", "high_bound)[0] j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound covariance = np.cov(j_step_return_trajectories) error", "importance_weights_one_earlier ) importance_sampled_cumulative_reward = np.sum( np.multiply(weighted_discounts[:, : j_step + 1],", "+ direct_method_value - control_variate ) return j_step_return @staticmethod def confidence_bounds(x,", "else: direct_method_value = np.zeros([num_trajectories]) control_variate = np.sum( np.multiply( weighted_discounts[:, :", "importance_weights = np.cumprod(importance_weights, axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights )", "rewards, logged_propensities, target_propensities, estimated_q_values, ): \"\"\" Take in samples (action,", "(action, rewards, propensities, etc.) and output lists of equal-length trajectories", "( weighted_discounts_one_earlier[:, j_step + 1] * estimated_state_values[:, j_step + 1]", "actions), axis=2 ) estimated_state_values = np.sum( np.multiply(target_propensities, estimated_q_values), axis=2 )", "estimated_state_values[:, : j_step + 1], ), axis=1, ) j_step_return =", "estimated_q_values, ) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories( edp.mdp_id, edp.action_mask.cpu().numpy(), edp.logged_rewards.cpu().numpy().flatten(), edp.logged_propensities.cpu().numpy().flatten(), edp.model_propensities.cpu().numpy(), edp.model_values.cpu().numpy(),", "+ 1], ), axis=1, ) j_step_return = ( importance_sampled_cumulative_reward +", "trajectories.append(np.arange(episode_start, episode_end + 1)) episode_start = episode_end + 1 action_trajectories", "in range(0, mdp_ids.shape[0]): if x + 1 == mdp_ids.shape[0] or", "its affiliates. All rights reserved. import itertools import logging import", "WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) # decompose error into bias +", "reward_trajectories.append(rewards[trajectory]) logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory]) Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x, fill_value): x_equal_length = np.array(", "= np.sum( np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, : j_step", "random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in random_idxs], num_j_steps=sample_size,", "+ 1) * interval) ) importance_weights = ( target_propensity_for_logged_action[trajectory_subset] /", "trajectory_subset = np.arange( int(i * interval), int((i + 1) *", "(episodes) accoriding to terminals. 
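
    # A sketch of the blending idea implemented above (editor's note, grounded
    # in the paper cited in estimate()): each j_step splits the horizon into a
    # prefix evaluated by weighted importance sampling and a tail evaluated by
    # the model (direct method). j_step = -1 reduces to the pure direct-method
    # estimate and j_step = inf to the full weighted doubly robust estimate.
    # Following the MAGIC estimator of Thomas & Brunskill
    # (https://arxiv.org/pdf/1604.00923.pdf), the method below then picks a
    # convex combination of these partial estimators that minimizes an
    # estimated mean squared error.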

    def compute_weighted_doubly_robust_point_estimate(
        self,
        j_steps,
        num_j_steps,
        j_step_returns,
        infinite_step_returns,
        j_step_return_trajectories,
    ):
        low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds(
            infinite_step_returns,
            WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL,
        )
        # decompose error into bias + variance
        j_step_bias = np.zeros([num_j_steps])
        where_lower = np.where(j_step_returns < low_bound)[0]
        j_step_bias[where_lower] = low_bound - j_step_returns[where_lower]
        where_higher = np.where(j_step_returns > high_bound)[0]
        j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound

        covariance = np.cov(j_step_return_trajectories)
        error = covariance + j_step_bias.T * j_step_bias

        # minimize mse error
        constraint = {"type": "eq", "fun": lambda x: np.sum(x) - 1.0}
        x = np.zeros([len(j_steps)])
        res = sp.optimize.minimize(
            mse_loss,
            x,
            args=error,
            constraints=constraint,
            bounds=[(0, 1) for _ in range(x.shape[0])],
        )
        x = np.array(res.x)
        return float(np.dot(x, j_step_returns))
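
    # Editor's note on the objective above: mse_loss (defined at module level
    # below) evaluates the quadratic form x . error . x^T, so the weights x
    # approximately solve
    #     minimize  x^T (covariance + bias term) x
    #     subject to  sum(x) = 1  and  0 <= x_i <= 1.
    # The bias of each j-step return is taken as its distance to a confidence
    # interval built from the subset estimates of the infinite-step return,
    # and is clipped to zero inside that interval.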
\"\"\" num_actions = len(target_propensities[0])", "covariance = np.cov(j_step_return_trajectories) error = covariance + j_step_bias.T * j_step_bias", ") bootstrapped_means.append(wdr_estimate) weighted_doubly_robust_std_error = np.std(bootstrapped_means) episode_values = np.sum(np.multiply(rewards, discounts), axis=1)", "break trajectories into several subsets to estimate confidence bounds infinite_step_returns", "* interval), int((i + 1) * interval) ) importance_weights =", "j_step = int(min(j_step, trajectory_length - 1)) weighted_discounts = np.multiply(discounts, importance_weights)", "1: direct_method_value = ( weighted_discounts_one_earlier[:, j_step + 1] * estimated_state_values[:,", "into several subsets to estimate confidence bounds infinite_step_returns = []", "trajectories into several subsets to estimate confidence bounds infinite_step_returns =", "+ 1] ) else: direct_method_value = np.zeros([num_trajectories]) control_variate = np.sum(", "= to_equal_length( logged_propensity_trajectories, 1 ) target_propensity_trajectories = to_equal_length( target_propensity_trajectories, np.zeros([num_actions])", "j_steps = [float(\"inf\")] if num_j_steps > 1: j_steps.append(-1) if num_j_steps", "where_zeros] = 1.0 importance_weights /= sum_importance_weights return importance_weights else: importance_weights", "weighted_doubly_robust_std_error = np.std(bootstrapped_means) episode_values = np.sum(np.multiply(rewards, discounts), axis=1) denominator =", "x: np.sum(x) - 1.0} x = np.zeros([len(j_steps)]) res = sp.optimize.minimize(", "args=error, constraints=constraint, bounds=[(0, 1) for _ in range(x.shape[0])], ) x", "= self.compute_weighted_doubly_robust_point_estimate( j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ) # Use", "0.0)[0] sum_importance_weights[where_zeros] = len(importance_weights) importance_weights[:, where_zeros] = 1.0 importance_weights /=", "[importance_weights_one_earlier, importance_weights[:, :-1]] ) infinite_step_return = np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset], discounts,", "num_subsets for i in range(num_subsets): trajectory_subset = np.arange( int(i *", "normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ): if whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights, axis=0)", "mse error constraint = {\"type\": \"eq\", \"fun\": lambda x: np.sum(x)", "= WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([len(trajectory_subset), 1])", "res = sp.optimize.minimize( mse_loss, x, args=error, constraints=constraint, bounds=[(0, 1) for", "np.zeros(mdp_ids.shape[0]) for x in range(0, mdp_ids.shape[0]): if x + 1", "all data weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate( j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories,", "+ 1] * estimated_state_values[:, j_step + 1] ) else: direct_method_value", "edp.mdp_id, edp.action_mask.cpu().numpy(), edp.logged_rewards.cpu().numpy().flatten(), edp.logged_propensities.cpu().numpy().flatten(), edp.model_propensities.cpu().numpy(), edp.model_values.cpu().numpy(), ) num_trajectories = actions.shape[0]", "np.cov(j_step_return_trajectories) error = covariance + j_step_bias.T * j_step_bias # minimize", "np.sum(x) - 1.0} x = np.zeros([len(j_steps)]) res = sp.optimize.minimize( 
mse_loss,", "= np.sum( np.multiply(target_propensities, actions), axis=2 ) estimated_q_values_for_logged_action = np.sum( np.multiply(estimated_q_values,", "[] logged_propensity_trajectories = [] target_propensity_trajectories = [] Q_value_trajectories = []", "reward_trajectories = [] logged_propensity_trajectories = [] target_propensity_trajectories = [] Q_value_trajectories", "np.sum( np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, : j_step +", "mdp_ids[x + 1, 0]: terminals[x] = 1 trajectories = []", ") - np.multiply( weighted_discounts_one_earlier[:, : j_step + 1], estimated_state_values[:, :", "As the raw trajectories are of various lengths, the shorter", "j_step + 1]), axis=1, ) if j_step < trajectory_length -", "to_equal_length( target_propensity_trajectories, np.zeros([num_actions]) ) Q_value_trajectories = to_equal_length( Q_value_trajectories, np.zeros([num_actions]) )", "in trajectories: action_trajectories.append(actions[trajectory]) reward_trajectories.append(rewards[trajectory]) logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory]) Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x, fill_value):", "constraints=constraint, bounds=[(0, 1) for _ in range(x.shape[0])], ) x =", "WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([num_trajectories, 1]) *", "/ denominator, raw_std_error=weighted_doubly_robust_std_error, normalized_std_error=weighted_doubly_robust_std_error / denominator, ) def compute_weighted_doubly_robust_point_estimate( self,", "standard error bootstrapped_means = [] sample_size = int( WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT *", "rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action, j_step, ) ) j_step_return_trajectories", ") importance_weights = target_propensity_for_logged_action / logged_propensities importance_weights = np.cumprod(importance_weights, axis=1)", "logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) class WeightedSequentialDoublyRobustEstimator: NUM_SUBSETS_FOR_CB_ESTIMATES = 25 CONFIDENCE_INTERVAL", "np.zeros([num_j_steps]) where_lower = np.where(j_step_returns < low_bound)[0] j_step_bias[where_lower] = low_bound -", "): \"\"\" Take in samples (action, rewards, propensities, etc.) 
and", "len(rewards[0]) num_trajectories = len(rewards) j_step = int(min(j_step, trajectory_length - 1))", "for x in range(0, mdp_ids.shape[0]): if x + 1 ==", "= WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([num_trajectories, 1])", "j_step_return = ( importance_sampled_cumulative_reward + direct_method_value - control_variate ) return", "= np.multiply( discounts, importance_weights_one_earlier ) importance_sampled_cumulative_reward = np.sum( np.multiply(weighted_discounts[:, :", "- 1)) weighted_discounts = np.multiply(discounts, importance_weights) weighted_discounts_one_earlier = np.multiply( discounts,", "= np.sum(j_step_return_trajectories, axis=1) if len(j_step_returns) == 1: weighted_doubly_robust = j_step_returns[0]", "discounts), axis=1) denominator = np.nanmean(episode_values) if abs(denominator) < 1e-6: return", "trajectories = [] episode_start = 0 episode_ends = np.nonzero(terminals)[0] if", "h, m + h def mse_loss(x, error): return np.dot(np.dot(x, error),", "edp: EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights, ) -> CpeEstimate: # For details,", "def to_equal_length(x, fill_value): x_equal_length = np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0, 1)", "if len(terminals) - 1 not in episode_ends: episode_ends = np.append(episode_ends,", ") importance_sampled_cumulative_reward = np.sum( np.multiply(weighted_discounts[:, : j_step + 1], rewards[:,", "not in episode_ends: episode_ends = np.append(episode_ends, len(terminals) - 1) for", "( actions, rewards, logged_propensities, target_propensities, estimated_q_values, ) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories( edp.mdp_id,", "weighted_doubly_robust = j_step_returns[0] weighted_doubly_robust_std_error = 0.0 else: # break trajectories", "num_j_steps, whether_self_normalize_importance_weights, ) -> CpeEstimate: # For details, visit https://arxiv.org/pdf/1604.00923.pdf", "- 1, num=trajectory_length, base=self.gamma ) j_step_return_trajectories = [] for j_step", "infinite_step_returns, j_step_return_trajectories, ) # Use bootstrapping to compute weighted_doubly_robust standard", ") estimated_q_values_for_logged_action = np.sum( np.multiply(estimated_q_values, actions), axis=2 ) estimated_state_values =", "j_step + 1], ), axis=1, ) j_step_return = ( importance_sampled_cumulative_reward", "def compute_weighted_doubly_robust_point_estimate( self, j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ): low_bound,", "mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x + 1, 0]: terminals[x]", "in range(1, num_j_steps - 1)]) target_propensity_for_logged_action = np.sum( np.multiply(target_propensities, actions),", "+ 1]), axis=1, ) if j_step < trajectory_length - 1:", "50 BOOTSTRAP_SAMPLE_PCT = 0.5 def __init__(self, gamma): self.gamma = gamma", "bootstrapped_means.append(wdr_estimate) weighted_doubly_robust_std_error = np.std(bootstrapped_means) episode_values = np.sum(np.multiply(rewards, discounts), axis=1) denominator", "j_step_return_trajectories, ): low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) #", "= [] logged_propensity_trajectories = [] target_propensity_trajectories = [] Q_value_trajectories =", "j_step_returns[0] weighted_doubly_robust_std_error = 0.0 else: # 
break trajectories into several", "in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action,", "of various lengths, the shorter ones are filled with zeros(ones)", "* num_subsets ) for _ in range( WeightedSequentialDoublyRobustEstimator.NUM_BOOTSTRAP_SAMPLES ): random_idxs", "mse_loss, x, args=error, constraints=constraint, bounds=[(0, 1) for _ in range(x.shape[0])],", "> 2: interval = trajectory_length // (num_j_steps - 1) j_steps.extend([i", "CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust", "1] * estimated_state_values[:, j_step + 1] ) else: direct_method_value =", "j_step_returns, infinite_step_returns, j_step_return_trajectories, ) # Use bootstrapping to compute weighted_doubly_robust", "= covariance + j_step_bias.T * j_step_bias # minimize mse error", "importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([len(trajectory_subset), 1]) * 1.0", "Q_value_trajectories, np.zeros([num_actions]) ) return ( action_trajectories, reward_trajectories, logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories,", ") interval = num_trajectories / num_subsets for i in range(num_subsets):", "1], estimated_state_values[:, : j_step + 1], ), axis=1, ) j_step_return", "actions.shape[0] trajectory_length = actions.shape[1] j_steps = [float(\"inf\")] if num_j_steps >", "i in random_idxs], num_j_steps=sample_size, j_step_returns=j_step_returns[random_idxs], infinite_step_returns=infinite_step_returns, j_step_return_trajectories=j_step_return_trajectories[random_idxs], ) bootstrapped_means.append(wdr_estimate) weighted_doubly_robust_std_error", "num_subsets = int( min( num_trajectories / 2, WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES, ) )", "logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories, ) @staticmethod def normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ):", "denominator, raw_std_error=weighted_doubly_robust_std_error, normalized_std_error=weighted_doubly_robust_std_error / denominator, ) def compute_weighted_doubly_robust_point_estimate( self, j_steps,", "np.cumprod(importance_weights, axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier =", "8 ( actions, rewards, logged_propensities, target_propensities, estimated_q_values, ) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories(", "WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action, j_step, ) )", "rights reserved. import itertools import logging import numpy as np", "All rights reserved. 
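
    # Editor's example with made-up values: two reward trajectories of lengths
    # 3 and 2 are padded on the right by to_equal_length:
    #     [[1, 2, 3], [4, 5]]  ->  [[1, 2, 3], [4, 5, 0]]
    # Logged propensities are padded with 1 (keeping the importance ratios
    # finite) while actions and Q-values are padded with zero vectors, so
    # steps past an episode's end add nothing to the weighted sums.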

    @staticmethod
    def normalize_importance_weights(
        importance_weights, whether_self_normalize_importance_weights
    ):
        if whether_self_normalize_importance_weights:
            sum_importance_weights = np.sum(importance_weights, axis=0)
            where_zeros = np.where(sum_importance_weights == 0.0)[0]
            sum_importance_weights[where_zeros] = len(importance_weights)
            importance_weights[:, where_zeros] = 1.0
            importance_weights /= sum_importance_weights
            return importance_weights
        else:
            importance_weights /= importance_weights.shape[0]
            return importance_weights

    @staticmethod
    def calculate_step_return(
        rewards,
        discounts,
        importance_weights,
        importance_weights_one_earlier,
        estimated_state_values,
        estimated_q_values,
        j_step,
    ):
        trajectory_length = len(rewards[0])
        num_trajectories = len(rewards)
        j_step = int(min(j_step, trajectory_length - 1))

        weighted_discounts = np.multiply(discounts, importance_weights)
        weighted_discounts_one_earlier = np.multiply(
            discounts, importance_weights_one_earlier
        )

        importance_sampled_cumulative_reward = np.sum(
            np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, : j_step + 1]),
            axis=1,
        )

        if j_step < trajectory_length - 1:
            direct_method_value = (
                weighted_discounts_one_earlier[:, j_step + 1]
                * estimated_state_values[:, j_step + 1]
            )
        else:
            direct_method_value = np.zeros([num_trajectories])

        control_variate = np.sum(
            np.multiply(
                weighted_discounts[:, : j_step + 1],
                estimated_q_values[:, : j_step + 1],
            )
            - np.multiply(
                weighted_discounts_one_earlier[:, : j_step + 1],
                estimated_state_values[:, : j_step + 1],
            ),
            axis=1,
        )

        j_step_return = (
            importance_sampled_cumulative_reward
            + direct_method_value
            - control_variate
        )
        return j_step_return
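
    # Editor's sketch of the per-trajectory return computed above, with w_t
    # the (normalized) cumulative importance weight at step t and
    # w_{-1} = 1 / num_trajectories:
    #
    #     g^(j) =   sum_{t=0..j} gamma^t * w_t * r_t
    #             + gamma^(j+1) * w_j * V(s_{j+1})
    #             - sum_{t=0..j} gamma^t * (w_t * Q(s_t, a_t) - w_{t-1} * V(s_t))
    #
    # i.e. an importance-sampled reward term, a direct-method tail, and a
    # control variate built from the model's Q and V estimates.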

    @staticmethod
    def confidence_bounds(x, confidence):
        n = len(x)
        m, se = np.mean(x), sp.stats.sem(x)
        h = se * sp.stats.t.ppf((1 + confidence) / 2.0, n - 1)
        return m - h, m + h


def mse_loss(x, error):
    return np.dot(np.dot(x, error), x.T)
np.where(sum_importance_weights == 0.0)[0] sum_importance_weights[where_zeros] = len(importance_weights) importance_weights[:, where_zeros] =", "target_propensity_trajectories, np.zeros([num_actions]) ) Q_value_trajectories = to_equal_length( Q_value_trajectories, np.zeros([num_actions]) ) return", "sp.optimize.minimize( mse_loss, x, args=error, constraints=constraint, bounds=[(0, 1) for _ in", "axis=1, ) j_step_return = ( importance_sampled_cumulative_reward + direct_method_value - control_variate", "to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories = to_equal_length(reward_trajectories, 0) logged_propensity_trajectories =", "import torch from ml.rl.evaluation.cpe import CpeEstimate from ml.rl.evaluation.evaluation_data_page import EvaluationDataPage", "j_step_bias[where_lower] = low_bound - j_step_returns[where_lower] where_higher = np.where(j_step_returns > high_bound)[0]", "x = np.array(res.x) return float(np.dot(x, j_step_returns)) @staticmethod def transform_to_equal_length_trajectories( mdp_ids,", "= np.cov(j_step_return_trajectories) error = covariance + j_step_bias.T * j_step_bias #", "lengths, the shorter ones are filled with zeros(ones) at the", ") -> CpeEstimate: # For details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5,", "= [] for j_step in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts,", "error bootstrapped_means = [] sample_size = int( WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT * num_subsets", "high_bound covariance = np.cov(j_step_return_trajectories) error = covariance + j_step_bias.T *", "= np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories, axis=1) if len(j_step_returns) == 1:", "return j_step_return @staticmethod def confidence_bounds(x, confidence): n = len(x) m,", "for _ in range(x.shape[0])], ) x = np.array(res.x) return float(np.dot(x,", "num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ): low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns,", "0.9 NUM_BOOTSTRAP_SAMPLES = 50 BOOTSTRAP_SAMPLE_PCT = 0.5 def __init__(self, gamma):", "* interval) ) importance_weights = ( target_propensity_for_logged_action[trajectory_subset] / logged_propensities[trajectory_subset] )", "logged_propensities importance_weights = np.cumprod(importance_weights, axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights", "importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] ) infinite_step_return = np.sum(", "np.nanmean(episode_values) if abs(denominator) < 1e-6: return CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0,", ") j_step_return = ( importance_sampled_cumulative_reward + direct_method_value - control_variate )", "x in range(0, mdp_ids.shape[0]): if x + 1 == mdp_ids.shape[0]", ":-1]] ) discounts = np.logspace( start=0, stop=trajectory_length - 1, num=trajectory_length,", "interval), int((i + 1) * interval) ) importance_weights = (", "importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values, j_step, ): trajectory_length = len(rewards[0]) num_trajectories", ") if j_step < trajectory_length - 1: direct_method_value = (", "point estimate using all data weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate( 
j_steps, num_j_steps,", ") return j_step_return @staticmethod def confidence_bounds(x, confidence): n = len(x)", ") infinite_step_return = np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset], discounts, importance_weights, importance_weights_one_earlier, estimated_state_values[trajectory_subset],", ") ) j_step_return_trajectories = np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories, axis=1) if", "importance_weights_one_earlier, estimated_state_values[trajectory_subset], estimated_q_values_for_logged_action[trajectory_subset], float(\"inf\"), ) ) infinite_step_returns.append(infinite_step_return) # Compute weighted_doubly_robust", "np.multiply( weighted_discounts_one_earlier[:, : j_step + 1], estimated_state_values[:, : j_step +", ") importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] ) discounts =", "bounds infinite_step_returns = [] num_subsets = int( min( num_trajectories /", "= j_step_returns[where_higher] - high_bound covariance = np.cov(j_step_return_trajectories) error = covariance", ") ) infinite_step_returns.append(infinite_step_return) # Compute weighted_doubly_robust mean point estimate using", "interval) ) importance_weights = ( target_propensity_for_logged_action[trajectory_subset] / logged_propensities[trajectory_subset] ) importance_weights", ":-1]] ) infinite_step_return = np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset], discounts, importance_weights, importance_weights_one_earlier,", "mean point estimate using all data weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate( j_steps,", "np.ones([len(trajectory_subset), 1]) * 1.0 / len(trajectory_subset) ) importance_weights_one_earlier = np.hstack(", "= [] reward_trajectories = [] logged_propensity_trajectories = [] target_propensity_trajectories =", "- 1) j_steps.extend([i * interval for i in range(1, num_j_steps", "discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action, j_step, ) ) j_step_return_trajectories =", "np.zeros([num_actions]) ) reward_trajectories = to_equal_length(reward_trajectories, 0) logged_propensity_trajectories = to_equal_length( logged_propensity_trajectories,", "return m - h, m + h def mse_loss(x, error):", "and output lists of equal-length trajectories (episodes) accoriding to terminals.", "logged_propensity_trajectories, 1 ) target_propensity_trajectories = to_equal_length( target_propensity_trajectories, np.zeros([num_actions]) ) Q_value_trajectories", "normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust / denominator, raw_std_error=weighted_doubly_robust_std_error, normalized_std_error=weighted_doubly_robust_std_error", "1)) episode_start = episode_end + 1 action_trajectories = [] reward_trajectories", "= np.sum(importance_weights, axis=0) where_zeros = np.where(sum_importance_weights == 0.0)[0] sum_importance_weights[where_zeros] =", "normalized_std_error=weighted_doubly_robust_std_error / denominator, ) def compute_weighted_doubly_robust_point_estimate( self, j_steps, num_j_steps, j_step_returns,", "= np.where(j_step_returns > high_bound)[0] j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound covariance", "normalized=weighted_doubly_robust / denominator, raw_std_error=weighted_doubly_robust_std_error, 
normalized_std_error=weighted_doubly_robust_std_error / denominator, ) def compute_weighted_doubly_robust_point_estimate(", "raw trajectories are of various lengths, the shorter ones are", "estimated_q_values_for_logged_action, j_step, ) ) j_step_return_trajectories = np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories,", "action_trajectories, reward_trajectories, logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories, ) @staticmethod def normalize_importance_weights( importance_weights,", "mdp_ids[x, 0] != mdp_ids[x + 1, 0]: terminals[x] = 1", "np.multiply(discounts, importance_weights) weighted_discounts_one_earlier = np.multiply( discounts, importance_weights_one_earlier ) importance_sampled_cumulative_reward =", "self.compute_weighted_doubly_robust_point_estimate( j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ) # Use bootstrapping", "def estimate( self, edp: EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights, ) -> CpeEstimate:", "= np.random.choice(num_j_steps, sample_size, replace=False) random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for", "self.gamma = gamma def estimate( self, edp: EvaluationDataPage, num_j_steps, whether_self_normalize_importance_weights,", "trajectory_length = actions.shape[1] j_steps = [float(\"inf\")] if num_j_steps > 1:", "< low_bound)[0] j_step_bias[where_lower] = low_bound - j_step_returns[where_lower] where_higher = np.where(j_step_returns", "( importance_sampled_cumulative_reward + direct_method_value - control_variate ) return j_step_return @staticmethod", "WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) # decompose error into bias + variance j_step_bias", "raw_std_error=0.0, normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust / denominator, raw_std_error=weighted_doubly_robust_std_error,", "( target_propensity_for_logged_action[trajectory_subset] / logged_propensities[trajectory_subset] ) importance_weights = np.cumprod(importance_weights, axis=1) importance_weights", "+ j_step_bias.T * j_step_bias # minimize mse error constraint =", ") j_step_return_trajectories = np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories, axis=1) if len(j_step_returns)", "weighted_discounts_one_earlier = np.multiply( discounts, importance_weights_one_earlier ) importance_sampled_cumulative_reward = np.sum( np.multiply(weighted_discounts[:,", "episode_ends: trajectories.append(np.arange(episode_start, episode_end + 1)) episode_start = episode_end + 1", "range(1, num_j_steps - 1)]) target_propensity_for_logged_action = np.sum( np.multiply(target_propensities, actions), axis=2", "Use bootstrapping to compute weighted_doubly_robust standard error bootstrapped_means = []", "): if whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights, axis=0) where_zeros = np.where(sum_importance_weights", "Section 5, 7, 8 ( actions, rewards, logged_propensities, target_propensities, estimated_q_values,", "axis=1) denominator = np.nanmean(episode_values) if abs(denominator) < 1e-6: return CpeEstimate(", "lambda x: np.sum(x) - 1.0} x = np.zeros([len(j_steps)]) res =", "j_step + 1] ) - np.multiply( weighted_discounts_one_earlier[:, : j_step +", "np.sum(j_step_return_trajectories, axis=1) if len(j_step_returns) == 1: weighted_doubly_robust = j_step_returns[0] 
weighted_doubly_robust_std_error", "= np.multiply(discounts, importance_weights) weighted_discounts_one_earlier = np.multiply( discounts, importance_weights_one_earlier ) importance_sampled_cumulative_reward", "= to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories = to_equal_length(reward_trajectories, 0) logged_propensity_trajectories", "# Compute weighted_doubly_robust mean point estimate using all data weighted_doubly_robust", "0.0 else: # break trajectories into several subsets to estimate", "np.where(j_step_returns < low_bound)[0] j_step_bias[where_lower] = low_bound - j_step_returns[where_lower] where_higher =", "= len(target_propensities[0]) terminals = np.zeros(mdp_ids.shape[0]) for x in range(0, mdp_ids.shape[0]):", "return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust / denominator, raw_std_error=weighted_doubly_robust_std_error, normalized_std_error=weighted_doubly_robust_std_error / denominator,", "action_trajectories.append(actions[trajectory]) reward_trajectories.append(rewards[trajectory]) logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory]) Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x, fill_value): x_equal_length =", "importance_weights, importance_weights_one_earlier, estimated_state_values[trajectory_subset], estimated_q_values_for_logged_action[trajectory_subset], float(\"inf\"), ) ) infinite_step_returns.append(infinite_step_return) # Compute", "x_equal_length = np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0, 1) return x_equal_length action_trajectories", "weighted_discounts[:, : j_step + 1], estimated_q_values[:, : j_step + 1]", "1) for episode_end in episode_ends: trajectories.append(np.arange(episode_start, episode_end + 1)) episode_start", "numpy as np import scipy as sp import torch from", "whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([num_trajectories, 1]) * 1.0 /", "weighted_doubly_robust mean point estimate using all data weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate(", "np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0, 1) return x_equal_length action_trajectories = to_equal_length(", "= np.sum( np.multiply( weighted_discounts[:, : j_step + 1], estimated_q_values[:, :", "trajectory_length // (num_j_steps - 1) j_steps.extend([i * interval for i", "- h, m + h def mse_loss(x, error): return np.dot(np.dot(x,", ") importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] ) infinite_step_return =", "1.0 / len(trajectory_subset) ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]]", "j_step, ) ) j_step_return_trajectories = np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories, axis=1)", "): low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) # decompose", "np.sum( np.multiply(target_propensities, estimated_q_values), axis=2 ) importance_weights = target_propensity_for_logged_action / logged_propensities", "- 1) for episode_end in episode_ends: trajectories.append(np.arange(episode_start, episode_end + 1))", "whether_self_normalize_importance_weights, ) -> CpeEstimate: # For details, visit 
https://arxiv.org/pdf/1604.00923.pdf Section", "0) logged_propensity_trajectories = to_equal_length( logged_propensity_trajectories, 1 ) target_propensity_trajectories = to_equal_length(", "def calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values, j_step, ):", "visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8 ( actions, rewards, logged_propensities,", "h = se * sp.stats.t._ppf((1 + confidence) / 2.0, n", "rewards[:, : j_step + 1]), axis=1, ) if j_step <", "if whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights, axis=0) where_zeros = np.where(sum_importance_weights ==", "sum_importance_weights[where_zeros] = len(importance_weights) importance_weights[:, where_zeros] = 1.0 importance_weights /= sum_importance_weights", "len(rewards) j_step = int(min(j_step, trajectory_length - 1)) weighted_discounts = np.multiply(discounts,", "compute_weighted_doubly_robust_point_estimate( self, j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ): low_bound, high_bound", "= np.sum( np.multiply(target_propensities, estimated_q_values), axis=2 ) importance_weights = target_propensity_for_logged_action /", "= sp.optimize.minimize( mse_loss, x, args=error, constraints=constraint, bounds=[(0, 1) for _", "estimated_q_values, j_step, ): trajectory_length = len(rewards[0]) num_trajectories = len(rewards) j_step", "importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = ( np.ones([num_trajectories,", "num_j_steps - 1)]) target_propensity_for_logged_action = np.sum( np.multiply(target_propensities, actions), axis=2 )", "axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ) importance_weights_one_earlier = (", "else: # break trajectories into several subsets to estimate confidence", "# minimize mse error constraint = {\"type\": \"eq\", \"fun\": lambda", "trajectory_length - 1: direct_method_value = ( weighted_discounts_one_earlier[:, j_step + 1]", "episode_ends: episode_ends = np.append(episode_ends, len(terminals) - 1) for episode_end in", ").swapaxes(0, 1) return x_equal_length action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions]) )", "= actions.shape[0] trajectory_length = actions.shape[1] j_steps = [float(\"inf\")] if num_j_steps", "return x_equal_length action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories =", "Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x, fill_value): x_equal_length = np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0,", "if x + 1 == mdp_ids.shape[0] or mdp_ids[x, 0] !=", "episode_start = 0 episode_ends = np.nonzero(terminals)[0] if len(terminals) - 1", "i in range(1, num_j_steps - 1)]) target_propensity_for_logged_action = np.sum( np.multiply(target_propensities,", "num_subsets ) for _ in range( WeightedSequentialDoublyRobustEstimator.NUM_BOOTSTRAP_SAMPLES ): random_idxs =", "= trajectory_length // (num_j_steps - 1) j_steps.extend([i * interval for", "error constraint = {\"type\": \"eq\", \"fun\": lambda x: np.sum(x) -", "/ logged_propensities importance_weights = np.cumprod(importance_weights, axis=1) importance_weights = 
WeightedSequentialDoublyRobustEstimator.normalize_importance_weights( importance_weights,", "logged_propensities, target_propensities, estimated_q_values, ): \"\"\" Take in samples (action, rewards,", "self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in random_idxs], num_j_steps=sample_size, j_step_returns=j_step_returns[random_idxs], infinite_step_returns=infinite_step_returns, j_step_return_trajectories=j_step_return_trajectories[random_idxs],", "+ 1], estimated_q_values[:, : j_step + 1] ) - np.multiply(", "j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action, j_step,", "the raw trajectories are of various lengths, the shorter ones", "importance_weights_one_earlier = ( np.ones([len(trajectory_subset), 1]) * 1.0 / len(trajectory_subset) )", "= 0.9 NUM_BOOTSTRAP_SAMPLES = 50 BOOTSTRAP_SAMPLE_PCT = 0.5 def __init__(self,", "rewards[trajectory_subset], discounts, importance_weights, importance_weights_one_earlier, estimated_state_values[trajectory_subset], estimated_q_values_for_logged_action[trajectory_subset], float(\"inf\"), ) ) infinite_step_returns.append(infinite_step_return)", "# decompose error into bias + variance j_step_bias = np.zeros([num_j_steps])", "), axis=1, ) j_step_return = ( importance_sampled_cumulative_reward + direct_method_value -", "+ variance j_step_bias = np.zeros([num_j_steps]) where_lower = np.where(j_step_returns < low_bound)[0]", "np.zeros([num_trajectories]) control_variate = np.sum( np.multiply( weighted_discounts[:, : j_step + 1],", "* 1.0 / num_trajectories ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:,", "= [] Q_value_trajectories = [] for trajectory in trajectories: action_trajectories.append(actions[trajectory])", "len(j_step_returns) == 1: weighted_doubly_robust = j_step_returns[0] weighted_doubly_robust_std_error = 0.0 else:", "np.arange( int(i * interval), int((i + 1) * interval) )", "direct_method_value - control_variate ) return j_step_return @staticmethod def confidence_bounds(x, confidence):", "np.sum( np.multiply(estimated_q_values, actions), axis=2 ) estimated_state_values = np.sum( np.multiply(target_propensities, estimated_q_values),", "j_step + 1], rewards[:, : j_step + 1]), axis=1, )", "axis=0) where_zeros = np.where(sum_importance_weights == 0.0)[0] sum_importance_weights[where_zeros] = len(importance_weights) importance_weights[:,", "replace=False) random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in random_idxs],", "affiliates. All rights reserved. import itertools import logging import numpy", "j_step_bias # minimize mse error constraint = {\"type\": \"eq\", \"fun\":", "5, 7, 8 ( actions, rewards, logged_propensities, target_propensities, estimated_q_values, )", "= se * sp.stats.t._ppf((1 + confidence) / 2.0, n -", "confidence) / 2.0, n - 1) return m - h,", "python3 # Copyright (c) Facebook, Inc. and its affiliates. All", "= low_bound - j_step_returns[where_lower] where_higher = np.where(j_step_returns > high_bound)[0] j_step_bias[where_higher]", "1.0 importance_weights /= sum_importance_weights return importance_weights else: importance_weights /= importance_weights.shape[0]", "to terminals. 
As the raw trajectories are of various lengths,", "axis=2 ) importance_weights = target_propensity_for_logged_action / logged_propensities importance_weights = np.cumprod(importance_weights,", "CONFIDENCE_INTERVAL = 0.9 NUM_BOOTSTRAP_SAMPLES = 50 BOOTSTRAP_SAMPLE_PCT = 0.5 def", "logger.setLevel(logging.INFO) class WeightedSequentialDoublyRobustEstimator: NUM_SUBSETS_FOR_CB_ESTIMATES = 25 CONFIDENCE_INTERVAL = 0.9 NUM_BOOTSTRAP_SAMPLES", "= np.where(j_step_returns < low_bound)[0] j_step_bias[where_lower] = low_bound - j_step_returns[where_lower] where_higher", "+ 1, 0]: terminals[x] = 1 trajectories = [] episode_start", "x + 1 == mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x", "x_equal_length action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories = to_equal_length(reward_trajectories,", "data weighted_doubly_robust = self.compute_weighted_doubly_robust_point_estimate( j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, )", "estimated_state_values[trajectory_subset], estimated_q_values_for_logged_action[trajectory_subset], float(\"inf\"), ) ) infinite_step_returns.append(infinite_step_return) # Compute weighted_doubly_robust mean", "infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, ) # decompose error into bias + variance", "- 1) return m - h, m + h def", "edp.model_values.cpu().numpy(), ) num_trajectories = actions.shape[0] trajectory_length = actions.shape[1] j_steps =", "[float(\"inf\")] if num_j_steps > 1: j_steps.append(-1) if num_j_steps > 2:", "class WeightedSequentialDoublyRobustEstimator: NUM_SUBSETS_FOR_CB_ESTIMATES = 25 CONFIDENCE_INTERVAL = 0.9 NUM_BOOTSTRAP_SAMPLES =", "num_trajectories = len(rewards) j_step = int(min(j_step, trajectory_length - 1)) weighted_discounts", "np.sum(np.multiply(rewards, discounts), axis=1) denominator = np.nanmean(episode_values) if abs(denominator) < 1e-6:", "action_trajectories = [] reward_trajectories = [] logged_propensity_trajectories = [] target_propensity_trajectories", "= ( np.ones([num_trajectories, 1]) * 1.0 / num_trajectories ) importance_weights_one_earlier", ": j_step + 1] ) - np.multiply( weighted_discounts_one_earlier[:, : j_step", "se = np.mean(x), sp.stats.sem(x) h = se * sp.stats.t._ppf((1 +", "and its affiliates. All rights reserved. 
import itertools import logging", "BOOTSTRAP_SAMPLE_PCT = 0.5 def __init__(self, gamma): self.gamma = gamma def", "target_propensity_for_logged_action / logged_propensities importance_weights = np.cumprod(importance_weights, axis=1) importance_weights = WeightedSequentialDoublyRobustEstimator.normalize_importance_weights(", "+ 1], estimated_state_values[:, : j_step + 1], ), axis=1, )", "1: j_steps.append(-1) if num_j_steps > 2: interval = trajectory_length //", "1.0} x = np.zeros([len(j_steps)]) res = sp.optimize.minimize( mse_loss, x, args=error,", "direct_method_value = np.zeros([num_trajectories]) control_variate = np.sum( np.multiply( weighted_discounts[:, : j_step", "weighted_doubly_robust standard error bootstrapped_means = [] sample_size = int( WeightedSequentialDoublyRobustEstimator.BOOTSTRAP_SAMPLE_PCT", "np.mean(x), sp.stats.sem(x) h = se * sp.stats.t._ppf((1 + confidence) /", "np.where(sum_importance_weights == 0.0)[0] sum_importance_weights[where_zeros] = len(importance_weights) importance_weights[:, where_zeros] = 1.0", "subsets to estimate confidence bounds infinite_step_returns = [] num_subsets =", "minimize mse error constraint = {\"type\": \"eq\", \"fun\": lambda x:", "logged_propensity_trajectories = [] target_propensity_trajectories = [] Q_value_trajectories = [] for", "[] Q_value_trajectories = [] for trajectory in trajectories: action_trajectories.append(actions[trajectory]) reward_trajectories.append(rewards[trajectory])", "reward_trajectories = to_equal_length(reward_trajectories, 0) logged_propensity_trajectories = to_equal_length( logged_propensity_trajectories, 1 )", "def transform_to_equal_length_trajectories( mdp_ids, actions, rewards, logged_propensities, target_propensities, estimated_q_values, ): \"\"\"", "sp.stats.sem(x) h = se * sp.stats.t._ppf((1 + confidence) / 2.0,", "target_propensities, estimated_q_values, ) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories( edp.mdp_id, edp.action_mask.cpu().numpy(), edp.logged_rewards.cpu().numpy().flatten(), edp.logged_propensities.cpu().numpy().flatten(), edp.model_propensities.cpu().numpy(),", "Inc. and its affiliates. All rights reserved. import itertools import", "Facebook, Inc. and its affiliates. All rights reserved. 
import itertools", "into bias + variance j_step_bias = np.zeros([num_j_steps]) where_lower = np.where(j_step_returns", "import CpeEstimate from ml.rl.evaluation.evaluation_data_page import EvaluationDataPage logger = logging.getLogger(__name__) logger.setLevel(logging.INFO)", "WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories( edp.mdp_id, edp.action_mask.cpu().numpy(), edp.logged_rewards.cpu().numpy().flatten(), edp.logged_propensities.cpu().numpy().flatten(), edp.model_propensities.cpu().numpy(), edp.model_values.cpu().numpy(), ) num_trajectories =", "num_trajectories / num_subsets for i in range(num_subsets): trajectory_subset = np.arange(", "int(min(j_step, trajectory_length - 1)) weighted_discounts = np.multiply(discounts, importance_weights) weighted_discounts_one_earlier =", "- control_variate ) return j_step_return @staticmethod def confidence_bounds(x, confidence): n", "error = covariance + j_step_bias.T * j_step_bias # minimize mse", "n = len(x) m, se = np.mean(x), sp.stats.sem(x) h =", "estimate confidence bounds infinite_step_returns = [] num_subsets = int( min(", "\"\"\" num_actions = len(target_propensities[0]) terminals = np.zeros(mdp_ids.shape[0]) for x in", "j_step_return_trajectories = np.array(j_step_return_trajectories) j_step_returns = np.sum(j_step_return_trajectories, axis=1) if len(j_step_returns) ==", "= len(rewards) j_step = int(min(j_step, trajectory_length - 1)) weighted_discounts =", "0.5 def __init__(self, gamma): self.gamma = gamma def estimate( self,", "= np.zeros(mdp_ids.shape[0]) for x in range(0, mdp_ids.shape[0]): if x +", "actions.shape[1] j_steps = [float(\"inf\")] if num_j_steps > 1: j_steps.append(-1) if", "__init__(self, gamma): self.gamma = gamma def estimate( self, edp: EvaluationDataPage,", "= np.zeros([len(j_steps)]) res = sp.optimize.minimize( mse_loss, x, args=error, constraints=constraint, bounds=[(0,", "interval = num_trajectories / num_subsets for i in range(num_subsets): trajectory_subset", "/ num_subsets for i in range(num_subsets): trajectory_subset = np.arange( int(i", "logged_propensities, target_propensities, estimated_q_values, ) = WeightedSequentialDoublyRobustEstimator.transform_to_equal_length_trajectories( edp.mdp_id, edp.action_mask.cpu().numpy(), edp.logged_rewards.cpu().numpy().flatten(), edp.logged_propensities.cpu().numpy().flatten(),", "logging.getLogger(__name__) logger.setLevel(logging.INFO) class WeightedSequentialDoublyRobustEstimator: NUM_SUBSETS_FOR_CB_ESTIMATES = 25 CONFIDENCE_INTERVAL = 0.9", "raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust /", "CpeEstimate( raw=weighted_doubly_robust, normalized=weighted_doubly_robust / denominator, raw_std_error=weighted_doubly_robust_std_error, normalized_std_error=weighted_doubly_robust_std_error / denominator, )", "import logging import numpy as np import scipy as sp", "= np.sum( np.multiply(estimated_q_values, actions), axis=2 ) estimated_state_values = np.sum( np.multiply(target_propensities,", "j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound covariance = np.cov(j_step_return_trajectories) error =", "!= mdp_ids[x + 1, 0]: terminals[x] = 1 trajectories =", "= to_equal_length(reward_trajectories, 0) logged_propensity_trajectories = to_equal_length( logged_propensity_trajectories, 1 ) target_propensity_trajectories", "1) for _ in range(x.shape[0])], ) x = np.array(res.x) return", "sp.stats.t._ppf((1 + confidence) / 2.0, n - 1) 
return m", "== 1: weighted_doubly_robust = j_step_returns[0] weighted_doubly_robust_std_error = 0.0 else: #", "in episode_ends: episode_ends = np.append(episode_ends, len(terminals) - 1) for episode_end", "Take in samples (action, rewards, propensities, etc.) and output lists", "): trajectory_length = len(rewards[0]) num_trajectories = len(rewards) j_step = int(min(j_step,", "if j_step < trajectory_length - 1: direct_method_value = ( weighted_discounts_one_earlier[:,", "reward_trajectories, logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories, ) @staticmethod def normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights", "= np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0, 1) return x_equal_length action_trajectories =", "np.nonzero(terminals)[0] if len(terminals) - 1 not in episode_ends: episode_ends =", "np.zeros([len(j_steps)]) res = sp.optimize.minimize( mse_loss, x, args=error, constraints=constraint, bounds=[(0, 1)", ") @staticmethod def normalize_importance_weights( importance_weights, whether_self_normalize_importance_weights ): if whether_self_normalize_importance_weights: sum_importance_weights", "importance_weights @staticmethod def calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values,", "np.zeros([num_actions]) ) Q_value_trajectories = to_equal_length( Q_value_trajectories, np.zeros([num_actions]) ) return (", "np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] ) infinite_step_return = np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset],", "interval for i in range(1, num_j_steps - 1)]) target_propensity_for_logged_action =", "abs(denominator) < 1e-6: return CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 )", "= 0.5 def __init__(self, gamma): self.gamma = gamma def estimate(", "# For details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8 (", "importance_weights, whether_self_normalize_importance_weights ): if whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights, axis=0) where_zeros", "wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in random_idxs], num_j_steps=sample_size, j_step_returns=j_step_returns[random_idxs],", "# Use bootstrapping to compute weighted_doubly_robust standard error bootstrapped_means =", "\"fun\": lambda x: np.sum(x) - 1.0} x = np.zeros([len(j_steps)]) res", "m + h def mse_loss(x, error): return np.dot(np.dot(x, error), x.T)", "estimated_state_values = np.sum( np.multiply(target_propensities, estimated_q_values), axis=2 ) importance_weights = target_propensity_for_logged_action", "or mdp_ids[x, 0] != mdp_ids[x + 1, 0]: terminals[x] =", "if num_j_steps > 1: j_steps.append(-1) if num_j_steps > 2: interval", "1]) * 1.0 / num_trajectories ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier,", "= len(importance_weights) importance_weights[:, where_zeros] = 1.0 importance_weights /= sum_importance_weights return", "1] ) else: direct_method_value = np.zeros([num_trajectories]) control_variate = np.sum( np.multiply(", "# Copyright (c) Facebook, Inc. and its affiliates. 
All rights", "2, WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES, ) ) interval = num_trajectories / num_subsets for", "j_steps, num_j_steps, j_step_returns, infinite_step_returns, j_step_return_trajectories, ) # Use bootstrapping to", "np.sum( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards[trajectory_subset], discounts, importance_weights, importance_weights_one_earlier, estimated_state_values[trajectory_subset], estimated_q_values_for_logged_action[trajectory_subset], float(\"inf\"), )", "1 == mdp_ids.shape[0] or mdp_ids[x, 0] != mdp_ids[x + 1,", "trajectory in trajectories: action_trajectories.append(actions[trajectory]) reward_trajectories.append(rewards[trajectory]) logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory]) Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x,", "to_equal_length(x, fill_value): x_equal_length = np.array( list(itertools.zip_longest(*x, fillvalue=fill_value)) ).swapaxes(0, 1) return", "j_step in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts, importance_weights, importance_weights_one_earlier, estimated_state_values,", "j_steps.extend([i * interval for i in range(1, num_j_steps - 1)])", "variance j_step_bias = np.zeros([num_j_steps]) where_lower = np.where(j_step_returns < low_bound)[0] j_step_bias[where_lower]", "mdp_ids, actions, rewards, logged_propensities, target_propensities, estimated_q_values, ): \"\"\" Take in", "= num_trajectories / num_subsets for i in range(num_subsets): trajectory_subset =", "_ in range(x.shape[0])], ) x = np.array(res.x) return float(np.dot(x, j_step_returns))", "https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8 ( actions, rewards, logged_propensities, target_propensities,", "@staticmethod def transform_to_equal_length_trajectories( mdp_ids, actions, rewards, logged_propensities, target_propensities, estimated_q_values, ):", "= [] num_subsets = int( min( num_trajectories / 2, WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES,", "weighted_discounts_one_earlier[:, j_step + 1] * estimated_state_values[:, j_step + 1] )", "/ 2, WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES, ) ) interval = num_trajectories / num_subsets", "decompose error into bias + variance j_step_bias = np.zeros([num_j_steps]) where_lower", ") return ( action_trajectories, reward_trajectories, logged_propensity_trajectories, target_propensity_trajectories, Q_value_trajectories, ) @staticmethod", "0 episode_ends = np.nonzero(terminals)[0] if len(terminals) - 1 not in", ") ) interval = num_trajectories / num_subsets for i in", "1.0 / num_trajectories ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]]", "else: importance_weights /= importance_weights.shape[0] return importance_weights @staticmethod def calculate_step_return( rewards,", ") # decompose error into bias + variance j_step_bias =", "1)]) target_propensity_for_logged_action = np.sum( np.multiply(target_propensities, actions), axis=2 ) estimated_q_values_for_logged_action =", "= np.nanmean(episode_values) if abs(denominator) < 1e-6: return CpeEstimate( raw=0.0, normalized=0.0,", ") j_step_return_trajectories = [] for j_step in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return(", ") estimated_state_values = np.sum( 
np.multiply(target_propensities, estimated_q_values), axis=2 ) importance_weights =", "+ 1 action_trajectories = [] reward_trajectories = [] logged_propensity_trajectories =", "sample_size, replace=False) random_idxs.sort() wdr_estimate = self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in", "np.where(j_step_returns > high_bound)[0] j_step_bias[where_higher] = j_step_returns[where_higher] - high_bound covariance =", "[] reward_trajectories = [] logged_propensity_trajectories = [] target_propensity_trajectories = []", "m - h, m + h def mse_loss(x, error): return", "np.multiply(target_propensities, estimated_q_values), axis=2 ) importance_weights = target_propensity_for_logged_action / logged_propensities importance_weights", ") target_propensity_trajectories = to_equal_length( target_propensity_trajectories, np.zeros([num_actions]) ) Q_value_trajectories = to_equal_length(", "ones are filled with zeros(ones) at the end. \"\"\" num_actions", "= ( importance_sampled_cumulative_reward + direct_method_value - control_variate ) return j_step_return", "= np.zeros([num_trajectories]) control_variate = np.sum( np.multiply( weighted_discounts[:, : j_step +", "j_step_returns, infinite_step_returns, j_step_return_trajectories, ): low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL,", "infinite_step_returns, j_step_return_trajectories, ): low_bound, high_bound = WeightedSequentialDoublyRobustEstimator.confidence_bounds( infinite_step_returns, WeightedSequentialDoublyRobustEstimator.CONFIDENCE_INTERVAL, )", "- 1.0} x = np.zeros([len(j_steps)]) res = sp.optimize.minimize( mse_loss, x,", ": j_step + 1], estimated_state_values[:, : j_step + 1], ),", "= int( min( num_trajectories / 2, WeightedSequentialDoublyRobustEstimator.NUM_SUBSETS_FOR_CB_ESTIMATES, ) ) interval", "[] for j_step in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards, discounts, importance_weights,", "output lists of equal-length trajectories (episodes) accoriding to terminals. As", "action_trajectories = to_equal_length( action_trajectories, np.zeros([num_actions]) ) reward_trajectories = to_equal_length(reward_trajectories, 0)", "import scipy as sp import torch from ml.rl.evaluation.cpe import CpeEstimate", "are filled with zeros(ones) at the end. \"\"\" num_actions =", "# break trajectories into several subsets to estimate confidence bounds", "= np.zeros([num_j_steps]) where_lower = np.where(j_step_returns < low_bound)[0] j_step_bias[where_lower] = low_bound", "- high_bound covariance = np.cov(j_step_return_trajectories) error = covariance + j_step_bias.T", "details, visit https://arxiv.org/pdf/1604.00923.pdf Section 5, 7, 8 ( actions, rewards,", "= [] episode_start = 0 episode_ends = np.nonzero(terminals)[0] if len(terminals)", "importance_sampled_cumulative_reward = np.sum( np.multiply(weighted_discounts[:, : j_step + 1], rewards[:, :", "j_step_return_trajectories = [] for j_step in j_steps: j_step_return_trajectories.append( WeightedSequentialDoublyRobustEstimator.calculate_step_return( rewards,", "direct_method_value = ( weighted_discounts_one_earlier[:, j_step + 1] * estimated_state_values[:, j_step", "j_step + 1] ) else: direct_method_value = np.zeros([num_trajectories]) control_variate =", "lists of equal-length trajectories (episodes) accoriding to terminals. 
As the", "target_propensities, estimated_q_values, ): \"\"\" Take in samples (action, rewards, propensities,", "infinite_step_returns.append(infinite_step_return) # Compute weighted_doubly_robust mean point estimate using all data", "for i in range(1, num_j_steps - 1)]) target_propensity_for_logged_action = np.sum(", "- j_step_returns[where_lower] where_higher = np.where(j_step_returns > high_bound)[0] j_step_bias[where_higher] = j_step_returns[where_higher]", "logged_propensity_trajectories.append(logged_propensities[trajectory]) target_propensity_trajectories.append(target_propensities[trajectory]) Q_value_trajectories.append(estimated_q_values[trajectory]) def to_equal_length(x, fill_value): x_equal_length = np.array( list(itertools.zip_longest(*x,", "return CpeEstimate( raw=0.0, normalized=0.0, raw_std_error=0.0, normalized_std_error=0.0 ) return CpeEstimate( raw=weighted_doubly_robust,", "discounts = np.logspace( start=0, stop=trajectory_length - 1, num=trajectory_length, base=self.gamma )", "len(trajectory_subset) ) importance_weights_one_earlier = np.hstack( [importance_weights_one_earlier, importance_weights[:, :-1]] ) infinite_step_return", "the end. \"\"\" num_actions = len(target_propensities[0]) terminals = np.zeros(mdp_ids.shape[0]) for", "whether_self_normalize_importance_weights: sum_importance_weights = np.sum(importance_weights, axis=0) where_zeros = np.where(sum_importance_weights == 0.0)[0]", "Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.", "= self.compute_weighted_doubly_robust_point_estimate( j_steps=[j_steps[i] for i in random_idxs], num_j_steps=sample_size, j_step_returns=j_step_returns[random_idxs], infinite_step_returns=infinite_step_returns,", "len(terminals) - 1 not in episode_ends: episode_ends = np.append(episode_ends, len(terminals)", "import itertools import logging import numpy as np import scipy", "(c) Facebook, Inc. and its affiliates. All rights reserved. import", "constraint = {\"type\": \"eq\", \"fun\": lambda x: np.sum(x) - 1.0}", "importance_weights, importance_weights_one_earlier, estimated_state_values, estimated_q_values_for_logged_action, j_step, ) ) j_step_return_trajectories = np.array(j_step_return_trajectories)" ]
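# ---------------------------------------------------------------------------
# Usage sketch added by the editor, not part of the original source: the toy
# trajectories and all values below are invented for illustration. With zero
# value estimates, calculate_step_return reduces to the importance-sampled
# discounted reward truncated at j_step, so the result is easy to verify by
# hand.
if __name__ == "__main__":
    num_trajectories, trajectory_length = 2, 3
    gamma = 0.9

    rewards = np.array([[1.0, 0.0, 1.0], [0.0, 1.0, 0.0]])
    # Uniform weights stand in for the cumulative-product importance weights
    # that estimate() would compute from propensities.
    importance_weights = (
        np.ones([num_trajectories, trajectory_length]) / num_trajectories
    )
    importance_weights_one_earlier = np.hstack(
        [np.ones([num_trajectories, 1]) / num_trajectories, importance_weights[:, :-1]]
    )
    discounts = np.logspace(
        start=0, stop=trajectory_length - 1, num=trajectory_length, base=gamma
    )
    estimated_state_values = np.zeros([num_trajectories, trajectory_length])
    estimated_q_values = np.zeros([num_trajectories, trajectory_length])

    per_trajectory_return = WeightedSequentialDoublyRobustEstimator.calculate_step_return(
        rewards,
        discounts,
        importance_weights,
        importance_weights_one_earlier,
        estimated_state_values,
        estimated_q_values,
        float("inf"),  # j_step=inf means "use the full trajectory"
    )
    print(per_trajectory_return)  # one weighted discounted return per trajectory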
[ "from typing import List class Solution: def __init__(self, nums: List[int]):", "copy.copy(nums) def reset(self) -> List[int]: \"\"\" Resets the array to", "Modified time: 2019-08-03 10:53:15 import copy import random from typing", "何睿 # @Create Date: 2019-08-03 10:48:30 # @Last Modified by:", "List[int]: \"\"\" Resets the array to its original configuration and", "Returns a random shuffling of the array. \"\"\" random.shuffle(self.shuffle_) return", "Modified by: 何睿 # @Last Modified time: 2019-08-03 10:53:15 import", "何睿 # @Last Modified time: 2019-08-03 10:53:15 import copy import", "-> List[int]: \"\"\" Resets the array to its original configuration", "@Create Date: 2019-08-03 10:48:30 # @Last Modified by: 何睿 #", "def __init__(self, nums: List[int]): self.shuffle_ = nums self.original = copy.copy(nums)", "10:48:30 # @Last Modified by: 何睿 # @Last Modified time:", "= copy.copy(nums) def reset(self) -> List[int]: \"\"\" Resets the array", "import copy import random from typing import List class Solution:", "utf-8 -*- # @Author: 何睿 # @Create Date: 2019-08-03 10:48:30", "\"\"\" Returns a random shuffling of the array. \"\"\" random.shuffle(self.shuffle_)", "Date: 2019-08-03 10:48:30 # @Last Modified by: 何睿 # @Last", "original configuration and return it. \"\"\" return self.original def shuffle(self)", "def shuffle(self) -> List[int]: \"\"\" Returns a random shuffling of", "it. \"\"\" return self.original def shuffle(self) -> List[int]: \"\"\" Returns", "-*- # @Author: 何睿 # @Create Date: 2019-08-03 10:48:30 #", "by: 何睿 # @Last Modified time: 2019-08-03 10:53:15 import copy", "List[int]: \"\"\" Returns a random shuffling of the array. \"\"\"", "# @Last Modified time: 2019-08-03 10:53:15 import copy import random", "self.shuffle_ = nums self.original = copy.copy(nums) def reset(self) -> List[int]:", "@Author: 何睿 # @Create Date: 2019-08-03 10:48:30 # @Last Modified", "return it. \"\"\" return self.original def shuffle(self) -> List[int]: \"\"\"", "import random from typing import List class Solution: def __init__(self,", "import List class Solution: def __init__(self, nums: List[int]): self.shuffle_ =", "a random shuffling of the array. \"\"\" random.shuffle(self.shuffle_) return self.shuffle_", "its original configuration and return it. \"\"\" return self.original def", "= nums self.original = copy.copy(nums) def reset(self) -> List[int]: \"\"\"", "\"\"\" return self.original def shuffle(self) -> List[int]: \"\"\" Returns a", "\"\"\" Resets the array to its original configuration and return", "array to its original configuration and return it. \"\"\" return", "nums self.original = copy.copy(nums) def reset(self) -> List[int]: \"\"\" Resets", "shuffle(self) -> List[int]: \"\"\" Returns a random shuffling of the", "# @Create Date: 2019-08-03 10:48:30 # @Last Modified by: 何睿", "List class Solution: def __init__(self, nums: List[int]): self.shuffle_ = nums", "# -*- coding: utf-8 -*- # @Author: 何睿 # @Create", "-> List[int]: \"\"\" Returns a random shuffling of the array.", "and return it. \"\"\" return self.original def shuffle(self) -> List[int]:", "reset(self) -> List[int]: \"\"\" Resets the array to its original", "time: 2019-08-03 10:53:15 import copy import random from typing import", "to its original configuration and return it. 
\"\"\" return self.original", "copy import random from typing import List class Solution: def", "Solution: def __init__(self, nums: List[int]): self.shuffle_ = nums self.original =", "Resets the array to its original configuration and return it.", "configuration and return it. \"\"\" return self.original def shuffle(self) ->", "-*- coding: utf-8 -*- # @Author: 何睿 # @Create Date:", "the array to its original configuration and return it. \"\"\"", "@Last Modified by: 何睿 # @Last Modified time: 2019-08-03 10:53:15", "@Last Modified time: 2019-08-03 10:53:15 import copy import random from", "class Solution: def __init__(self, nums: List[int]): self.shuffle_ = nums self.original", "__init__(self, nums: List[int]): self.shuffle_ = nums self.original = copy.copy(nums) def", "random from typing import List class Solution: def __init__(self, nums:", "self.original = copy.copy(nums) def reset(self) -> List[int]: \"\"\" Resets the", "self.original def shuffle(self) -> List[int]: \"\"\" Returns a random shuffling", "List[int]): self.shuffle_ = nums self.original = copy.copy(nums) def reset(self) ->", "coding: utf-8 -*- # @Author: 何睿 # @Create Date: 2019-08-03", "# @Last Modified by: 何睿 # @Last Modified time: 2019-08-03", "2019-08-03 10:53:15 import copy import random from typing import List", "10:53:15 import copy import random from typing import List class", "def reset(self) -> List[int]: \"\"\" Resets the array to its", "typing import List class Solution: def __init__(self, nums: List[int]): self.shuffle_", "nums: List[int]): self.shuffle_ = nums self.original = copy.copy(nums) def reset(self)", "# @Author: 何睿 # @Create Date: 2019-08-03 10:48:30 # @Last", "return self.original def shuffle(self) -> List[int]: \"\"\" Returns a random", "2019-08-03 10:48:30 # @Last Modified by: 何睿 # @Last Modified" ]
[ "self.n_features_ represents the number of features selected (=number of clusters)", "\"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1", "return linkage def _hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical clustering Parameters", "self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals, __ =", "value: 0 Returns ---------- pipeline - feature selection pipeline \"\"\"", "cluster and contains the indexes of features belonging to the", "of clusters) The attribute self.selected_features_ is a list of indexes", "the highest chi2 score versus y. The attribute self.n_features_ represents", "represents a cluster and contains the indexes of features belonging", "between each two features in X, each value is 1-phi_correlation", "each two features in X, each value is 1-phi_correlation \"\"\"", "column Returns ---------- phi coefficient value \"\"\" confusion_matrix = pd.crosstab(x,", "linkage = self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X,", "def _phi_coef(self, x, y): \"\"\" Calculates phi coefficient between features", "attributes self.n_features_ = X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in", "cluster in clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self", "Creates feature selection pipeline Parameters ---------- k - the k", "dendogram created by hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a list of", "feature selection pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best',", "feature selection pipeline Parameters ---------- k - the k parameter", "the selected features \"\"\" if not self.dist_matrix: self._calc_dist_matrix(X) linkage =", "\"\"\" if not self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters =", "from scipy.stats import chi2_contingency from sklearn.base import BaseEstimator from sklearn.ensemble", "sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] = 1 return", "self.dist_matrix and self.threshold, and selects a feature from each cluster", "in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self, X, y):", "with the highest chi2 score versus y. The attribute self.n_features_", "from sklearn.base import BaseEstimator from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text", "selection pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2,", "y. The attribute self.n_features_ represents the number of features selected", "Calculate distance matrix between each two features in X, each", "versus y. 
The attribute self.n_features_ represents the number of features", "np.sqrt(chi2 / n) return corr def _calc_dist_matrix(self, X): \"\"\" Calculate", "_calc_dist_matrix(self, X): \"\"\" Calculate distance matrix between each two features", "---------- phi coefficient value \"\"\" confusion_matrix = pd.crosstab(x, y) chi2", "X): \"\"\" Calculate distance matrix between each two features in", "self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals, __", "if not self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters = self._hierarchical_clustering(linkage)", "belonging to the cluster \"\"\" # array of len(X) -", "---------- x - feature x column y - feature y", "clustering random_state - random state for the RandomForestClassifier. Deafult value:", "1 - X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def", "class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer that clusters the features", "SelectorMixin from sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A", "of fitted attributes (ending with a trailing # underscore) and", "of indexes that correspond to the selected features \"\"\" if", "of len(X) - array[i] is the cluster number to which", "clustering Parameters ---------- linkage - linkage dendogram created by hierarchy.linkage(self.distance_matrix,", "list(cluster_id_to_feature_idx.values()) def fit(self, X, y): \"\"\" Clusters the features (X", "for idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def", "mask = np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] = 1 return mask", "features \"\"\" if not self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters", "are selected Returns ---------- mask - boolean array of shape", "Parameters ---------- x - feature x column y - feature", "pd import scipy.spatial.distance as ssd from scipy.cluster import hierarchy from", "transformer that clusters the features in X according to dist_matrix,", "1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix =", "of features belonging to the cluster \"\"\" # array of", "# underscore) and otherwise raises a NotFittedError with the given", "linkage = hierarchy.linkage(self.dist_matrix, method=method) return linkage def _hierarchical_clustering(self, linkage): \"\"\"", "import numpy as np import sklearn import pandas as pd", "cluster \"\"\" # array of len(X) - array[i] is the", "import sklearn import pandas as pd import scipy.spatial.distance as ssd", "linkage def _hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical clustering Parameters ----------", "= np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] = 1 return mask def", "BaseEstimator): \"\"\" A transformer that clusters the features in X", "The attribute self.selected_features_ is a list of indexes that correspond", "Returns ---------- pipeline - feature selection pipeline \"\"\" pipeline =", "not self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals,", "feature_corr_dist_matrix = 1 - X_corr_mat 
feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix =", "\"\"\" # array of len(X) - array[i] is the cluster", "Returns ---------- phi coefficient value \"\"\" confusion_matrix = pd.crosstab(x, y)", "indexes that correspond to the selected features \"\"\" if not", "features Parameters ---------- x - feature x column y -", "self.threshold, criterion='distance') cluster_id_to_feature_idx = {} for idx, cluster_id in enumerate(cluster_ids):", "self.n_features_ = X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in clusters]", "is fitted by verifying the presence of fitted attributes (ending", "print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def _get_support_mask(self): \"\"\" Get the boolean", "A transformer that clusters the features in X according to", "raises a NotFittedError with the given message. sklearn.utils.validation.check_is_fitted(self) mask =", "clustering threshold for the Hierarchial clustering random_state - random state", "np import sklearn import pandas as pd import scipy.spatial.distance as", "the cluster \"\"\" # array of len(X) - array[i] is", "\"\"\" confusion_matrix = pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0] n =", "attributes (ending with a trailing # underscore) and otherwise raises", "chi2 score versus y. The attribute self.n_features_ represents the number", "[# input features] An element is True iff its corresponding", "to dist_matrix, and selects a feature from each cluster with", "_hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical clustering Parameters ---------- linkage -", "in X, each value is 1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X)", "each cluster with the highest chi2 score versus y. 
The", "def _get_support_mask(self): \"\"\" Get the boolean mask indicating which features", "SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer that clusters the features in", "Get the boolean mask indicating which features are selected Returns", "of lists, each list represents a cluster and contains the", "sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals) # fitted attributes self.n_features_ =", "numpy as np import sklearn import pandas as pd import", "feature x column y - feature y column Returns ----------", "presence of fitted attributes (ending with a trailing # underscore)", "\"\"\" Clusters the features (X columns) using self.dist_matrix and self.threshold,", "mask[self.selected_features_] = 1 return mask def get_fs_pipeline(k, threshold, random_state=0): \"\"\"", "- feature selection pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)),", "- feature x column y - feature y column Returns", "mask - boolean array of shape [# input features] An", "import scipy.spatial.distance as ssd from scipy.cluster import hierarchy from scipy.stats", "from each cluster with the highest chi2 score of X[feature]", "chi2 = chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr = np.sqrt(chi2 /", "X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'):", "feature from each cluster with the highest chi2 score versus", "selected features \"\"\" if not self.dist_matrix: self._calc_dist_matrix(X) linkage = self._corr_linkage()", "and contains the indexes of features belonging to the cluster", "= chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr = np.sqrt(chi2 / n)", "features function threshold - clustering threshold for the Hierarchial clustering", "self def _get_support_mask(self): \"\"\" Get the boolean mask indicating which", "(ending with a trailing # underscore) and otherwise raises a", "state for the RandomForestClassifier. Deafult value: 0 Returns ---------- pipeline", "dist_matrix, and selects a feature from each cluster with the", "features (X columns) using self.dist_matrix and self.threshold, and selects a", "- clustering threshold for the Hierarchial clustering random_state - random", "method=method) Returns ---------- a list of lists, each list represents", "[]).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self, X, y): \"\"\" Clusters the", "Deafult value: 0 Returns ---------- pipeline - feature selection pipeline", "if the estimator is fitted by verifying the presence of", "RandomForestClassifier. Deafult value: 0 Returns ---------- pipeline - feature selection", "corr def _calc_dist_matrix(self, X): \"\"\" Calculate distance matrix between each", "score of X[feature] versus y \"\"\" def __init__(self, dist_matrix=None, threshold=1):", "---------- pipeline - feature selection pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize',", "= hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx = {} for idx, cluster_id", "self.dist_matrix = dist_matrix self.threshold = threshold def _phi_coef(self, x, y):", "X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in clusters] self.clusters_ =", "Hierarchial clustering random_state - random state for the RandomForestClassifier. 
Deafult", "fitted attributes (ending with a trailing # underscore) and otherwise", "its corresponding feature is selected for retention. \"\"\" # Checks", "features] An element is True iff its corresponding feature is", "The attribute self.n_features_ represents the number of features selected (=number", "y): \"\"\" Calculates phi coefficient between features Parameters ---------- x", "fitted attributes self.n_features_ = X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster", "that clusters the features in X according to dist_matrix, and", "list of indexes that correspond to the selected features \"\"\"", "[chi2_vals[cluster].idxmax() for cluster in clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}')", "a list of lists, each list represents a cluster and", "---------- k - the k parameter for the SelectKBest features", "= [chi2_vals[cluster].idxmax() for cluster in clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f},", "Checks if the estimator is fitted by verifying the presence", "the cluster number to which sample i belongs cluster_ids =", "# array of len(X) - array[i] is the cluster number", "a list of indexes that correspond to the selected features", "x - feature x column y - feature y column", "correspond to the selected features \"\"\" if not self.dist_matrix: self._calc_dist_matrix(X)", "the given message. sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_]", "X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 - X_corr_mat feature_corr_dist_matrix_condensed =", "boolean mask indicating which features are selected Returns ---------- mask", "from sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline class", "in X according to dist_matrix, and selects a feature from", "the features (X columns) using self.dist_matrix and self.threshold, and selects", "the highest chi2 score of X[feature] versus y \"\"\" def", "i belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx = {}", "confusion_matrix = pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum()", "= feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method) return", "cluster with the highest chi2 score versus y. The attribute", "import SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator):", "def fit(self, X, y): \"\"\" Clusters the features (X columns)", "\"\"\" Calculates phi coefficient between features Parameters ---------- x -", "X, y): \"\"\" Clusters the features (X columns) using self.dist_matrix", "of shape [# input features] An element is True iff", "import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer that clusters", "return list(cluster_id_to_feature_idx.values()) def fit(self, X, y): \"\"\" Clusters the features", "0 Returns ---------- pipeline - feature selection pipeline \"\"\" pipeline", "import pandas as pd import scipy.spatial.distance as ssd from scipy.cluster", "selection pipeline Parameters ---------- k - the k parameter for", "highest chi2 score versus y. 
The attribute self.n_features_ represents the", "pandas as pd import scipy.spatial.distance as ssd from scipy.cluster import", "which features are selected Returns ---------- mask - boolean array", "self.threshold = threshold def _phi_coef(self, x, y): \"\"\" Calculates phi", "and self.threshold, and selects a feature from each cluster with", "lists, each list represents a cluster and contains the indexes", "random_state - random state for the RandomForestClassifier. Deafult value: 0", "X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 - X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix", "that correspond to the selected features \"\"\" if not self.dist_matrix:", "Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer that clusters the", "from each cluster with the highest chi2 score versus y.", "dist_matrix self.threshold = threshold def _phi_coef(self, x, y): \"\"\" Calculates", "for the SelectKBest features function threshold - clustering threshold for", "chi2_contingency from sklearn.base import BaseEstimator from sklearn.ensemble import RandomForestClassifier from", "coefficient value \"\"\" confusion_matrix = pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0]", "), dtype=bool) mask[self.selected_features_] = 1 return mask def get_fs_pipeline(k, threshold,", "as pd import scipy.spatial.distance as ssd from scipy.cluster import hierarchy", "columns) using self.dist_matrix and self.threshold, and selects a feature from", "a cluster and contains the indexes of features belonging to", "feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method) return linkage", "according to dist_matrix, and selects a feature from each cluster", "attribute self.selected_features_ is a list of indexes that correspond to", "array of len(X) - array[i] is the cluster number to", "def _hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical clustering Parameters ---------- linkage", "Parameters ---------- k - the k parameter for the SelectKBest", "sample i belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx =", "BaseEstimator from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from", "\"\"\" Get the boolean mask indicating which features are selected", "- X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self,", "message. 
sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] = 1", "\"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster',", "threshold, random_state=0): \"\"\" Creates feature selection pipeline Parameters ---------- k", "selects a feature from each cluster with the highest chi2", "array of shape [# input features] An element is True", "CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline", "self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method)", "indicating which features are selected Returns ---------- mask - boolean", "= 1 - X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed", "highest chi2 score of X[feature] versus y \"\"\" def __init__(self,", "features in X, each value is 1-phi_correlation \"\"\" X_df =", "linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a list", "features are selected Returns ---------- mask - boolean array of", "import hierarchy from scipy.stats import chi2_contingency from sklearn.base import BaseEstimator", "each value is 1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat =", "= ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage =", "return mask def get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates feature selection", "cluster_id_to_feature_idx = {} for idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx)", "= 1 return mask def get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates", "X[feature] versus y \"\"\" def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix =", "__ = sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals) # fitted attributes", "def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix self.threshold = threshold", "shape [# input features] An element is True iff its", "from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection", "k - the k parameter for the SelectKBest features function", "ssd from scipy.cluster import hierarchy from scipy.stats import chi2_contingency from", "# fitted attributes self.n_features_ = X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for", "= threshold def _phi_coef(self, x, y): \"\"\" Calculates phi coefficient", "k parameter for the SelectKBest features function threshold - clustering", "function threshold - clustering threshold for the Hierarchial clustering random_state", "mask indicating which features are selected Returns ---------- mask -", "element is True iff its corresponding feature is selected for", "for the RandomForestClassifier. 
Deafult value: 0 Returns ---------- pipeline -", "def _calc_dist_matrix(self, X): \"\"\" Calculate distance matrix between each two", "= self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals)", "distance matrix between each two features in X, each value", "---------- a list of lists, each list represents a cluster", "cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx = {} for idx,", "list represents a cluster and contains the indexes of features", "contains the indexes of features belonging to the cluster \"\"\"", "threshold def _phi_coef(self, x, y): \"\"\" Calculates phi coefficient between", "number to which sample i belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold,", "idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self,", "self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in clusters] self.clusters_ = clusters", "CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)), ('rf', RandomForestClassifier(random_state=random_state))]) return", "binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)), ('rf', RandomForestClassifier(random_state=random_state))]) return pipeline", "as ssd from scipy.cluster import hierarchy from scipy.stats import chi2_contingency", "NotFittedError with the given message. sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ),", "the k parameter for the SelectKBest features function threshold -", "= clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def _get_support_mask(self): \"\"\" Get", "X, each value is 1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat", "clusters) The attribute self.selected_features_ is a list of indexes that", "- feature y column Returns ---------- phi coefficient value \"\"\"", "enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self, X, y): \"\"\"", "indexes of features belonging to the cluster \"\"\" # array", "y \"\"\" def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix self.threshold", "is the cluster number to which sample i belongs cluster_ids", "the number of features selected (=number of clusters) The attribute", "fit(self, X, y): \"\"\" Clusters the features (X columns) using", "in clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def", "given message. 
sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] =", "sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import", "self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def _get_support_mask(self): \"\"\"", "attribute self.n_features_ represents the number of features selected (=number of", "= {} for idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return", "feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage", "SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\"", "corr = np.sqrt(chi2 / n) return corr def _calc_dist_matrix(self, X):", "iff its corresponding feature is selected for retention. \"\"\" #", "for the Hierarchial clustering random_state - random state for the", "the features in X according to dist_matrix, and selects a", "mask def get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates feature selection pipeline", "value is 1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef)", "Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)), ('rf', RandomForestClassifier(random_state=random_state))])", "dtype=bool) mask[self.selected_features_] = 1 return mask def get_fs_pipeline(k, threshold, random_state=0):", "two features in X, each value is 1-phi_correlation \"\"\" X_df", "= hierarchy.linkage(self.dist_matrix, method=method) return linkage def _hierarchical_clustering(self, linkage): \"\"\" Perform", "clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def _get_support_mask(self): \"\"\" Get the", "with a trailing # underscore) and otherwise raises a NotFittedError", "pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)),", "\"\"\" def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix self.threshold =", "hierarchy from scipy.stats import chi2_contingency from sklearn.base import BaseEstimator from", "chi2_vals = pd.Series(chi2_vals) # fitted attributes self.n_features_ = X.shape[1] self.selected_features_", "def _corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method) return linkage def", "X according to dist_matrix, and selects a feature from each", "threshold - clustering threshold for the Hierarchial clustering random_state -", "the RandomForestClassifier. 
Deafult value: 0 Returns ---------- pipeline - feature", "{} for idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values())", "= Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)), ('rf',", "y) chi2 = chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr = np.sqrt(chi2", "features selected (=number of clusters) The attribute self.selected_features_ is a", "n = confusion_matrix.sum().sum() corr = np.sqrt(chi2 / n) return corr", "= X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax() for cluster in clusters] self.clusters_", "- boolean array of shape [# input features] An element", "otherwise raises a NotFittedError with the given message. sklearn.utils.validation.check_is_fitted(self) mask", "pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 - X_corr_mat feature_corr_dist_matrix_condensed", "(=number of clusters) The attribute self.selected_features_ is a list of", "= pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 - X_corr_mat", "matrix between each two features in X, each value is", "y): \"\"\" Clusters the features (X columns) using self.dist_matrix and", "sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin,", "An element is True iff its corresponding feature is selected", "= np.sqrt(chi2 / n) return corr def _calc_dist_matrix(self, X): \"\"\"", "cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self, X, y): \"\"\" Clusters", "= dist_matrix self.threshold = threshold def _phi_coef(self, x, y): \"\"\"", "trailing # underscore) and otherwise raises a NotFittedError with the", "self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals) #", "phi coefficient value \"\"\" confusion_matrix = pd.crosstab(x, y) chi2 =", "y - feature y column Returns ---------- phi coefficient value", "of features selected (=number of clusters) The attribute self.selected_features_ is", "for cluster in clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return", "features in X according to dist_matrix, and selects a feature", "with the given message. 
sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_, ), dtype=bool)", "self.selected_features_ is a list of indexes that correspond to the", "x, y): \"\"\" Calculates phi coefficient between features Parameters ----------", "pd.Series(chi2_vals) # fitted attributes self.n_features_ = X.shape[1] self.selected_features_ = [chi2_vals[cluster].idxmax()", "from sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer", "import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import SelectKBest,", "Calculates phi coefficient between features Parameters ---------- x - feature", "1 return mask def get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates feature", "between features Parameters ---------- x - feature x column y", "belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx = {} for", "criterion='distance') cluster_id_to_feature_idx = {} for idx, cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id,", "- the k parameter for the SelectKBest features function threshold", "True iff its corresponding feature is selected for retention. \"\"\"", "linkage - linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method) Returns ----------", "scipy.cluster import hierarchy from scipy.stats import chi2_contingency from sklearn.base import", "import CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline import", "to the cluster \"\"\" # array of len(X) - array[i]", "= confusion_matrix.sum().sum() corr = np.sqrt(chi2 / n) return corr def", "from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin from", "clusters = self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X, y) chi2_vals =", "sklearn.base import BaseEstimator from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import", "is selected for retention. 
\"\"\" # Checks if the estimator", "sklearn.pipeline import Pipeline class SelectHierarchicalClustering(SelectorMixin, BaseEstimator): \"\"\" A transformer that", "each list represents a cluster and contains the indexes of", "(X columns) using self.dist_matrix and self.threshold, and selects a feature", "self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X, y) chi2_vals", "pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr =", "get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates feature selection pipeline Parameters ----------", "- array[i] is the cluster number to which sample i", "array[i] is the cluster number to which sample i belongs", "Returns ---------- a list of lists, each list represents a", "feature from each cluster with the highest chi2 score of", "value \"\"\" confusion_matrix = pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0] n", "threshold=1): self.dist_matrix = dist_matrix self.threshold = threshold def _phi_coef(self, x,", "- linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a", "import chi2_contingency from sklearn.base import BaseEstimator from sklearn.ensemble import RandomForestClassifier", "clusters the features in X according to dist_matrix, and selects", "return corr def _calc_dist_matrix(self, X): \"\"\" Calculate distance matrix between", "versus y \"\"\" def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix", "= X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 - X_corr_mat feature_corr_dist_matrix_condensed = ssd.squareform(feature_corr_dist_matrix)", "boolean array of shape [# input features] An element is", "chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr = np.sqrt(chi2 / n) return", "the boolean mask indicating which features are selected Returns ----------", "method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method) return linkage def _hierarchical_clustering(self, linkage):", "y) chi2_vals = pd.Series(chi2_vals) # fitted attributes self.n_features_ = X.shape[1]", "selected (=number of clusters) The attribute self.selected_features_ is a list", "is True iff its corresponding feature is selected for retention.", "clusters] self.clusters_ = clusters print(f'threshold={self.threshold:.2f}, selected_features={len(self.selected_features_)}') return self def _get_support_mask(self):", "# Checks if the estimator is fitted by verifying the", "_phi_coef(self, x, y): \"\"\" Calculates phi coefficient between features Parameters", "_corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix, method=method) return linkage def _hierarchical_clustering(self,", "__init__(self, dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix self.threshold = threshold def", "fitted by verifying the presence of fitted attributes (ending with", "to which sample i belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance')", "= sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals) # fitted attributes self.n_features_", "created by hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a list of lists,", "pipeline Parameters ---------- k - the k parameter for the", "= self._corr_linkage() clusters = self._hierarchical_clustering(linkage) chi2_vals, __ = sklearn.feature_selection.chi2(X, y)", "cluster number to 
which sample i belongs cluster_ids = hierarchy.fcluster(linkage,", "\"\"\" Creates feature selection pipeline Parameters ---------- k - the", "\"\"\" A transformer that clusters the features in X according", "self.threshold, and selects a feature from each cluster with the", "_get_support_mask(self): \"\"\" Get the boolean mask indicating which features are", "Returns ---------- mask - boolean array of shape [# input", "Clusters the features (X columns) using self.dist_matrix and self.threshold, and", "- random state for the RandomForestClassifier. Deafult value: 0 Returns", "hierarchy.linkage(self.dist_matrix, method=method) return linkage def _hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical", "as np import sklearn import pandas as pd import scipy.spatial.distance", "= pd.Series(chi2_vals) # fitted attributes self.n_features_ = X.shape[1] self.selected_features_ =", "phi coefficient between features Parameters ---------- x - feature x", "def get_fs_pipeline(k, threshold, random_state=0): \"\"\" Creates feature selection pipeline Parameters", "is a list of indexes that correspond to the selected", "to the selected features \"\"\" if not self.dist_matrix: self._calc_dist_matrix(X) linkage", "pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False, binary=True)), ('k_best', SelectKBest(score_func=sklearn.feature_selection.chi2, k=k)), ('cluster', SelectHierarchicalClustering(threshold=threshold)),", "a trailing # underscore) and otherwise raises a NotFittedError with", "\"\"\" Calculate distance matrix between each two features in X,", "scipy.stats import chi2_contingency from sklearn.base import BaseEstimator from sklearn.ensemble import", "score versus y. The attribute self.n_features_ represents the number of", "by verifying the presence of fitted attributes (ending with a", "for retention. \"\"\" # Checks if the estimator is fitted", "confusion_matrix.sum().sum() corr = np.sqrt(chi2 / n) return corr def _calc_dist_matrix(self,", "and selects a feature from each cluster with the highest", "the SelectKBest features function threshold - clustering threshold for the", "using self.dist_matrix and self.threshold, and selects a feature from each", "return self def _get_support_mask(self): \"\"\" Get the boolean mask indicating", "linkage): \"\"\" Perform hierarchical clustering Parameters ---------- linkage - linkage", "list of lists, each list represents a cluster and contains", "a NotFittedError with the given message. sklearn.utils.validation.check_is_fitted(self) mask = np.zeros((self.n_features_,", "sklearn import pandas as pd import scipy.spatial.distance as ssd from", "the Hierarchial clustering random_state - random state for the RandomForestClassifier.", "the estimator is fitted by verifying the presence of fitted", "y column Returns ---------- phi coefficient value \"\"\" confusion_matrix =", "random state for the RandomForestClassifier. 
Deafult value: 0 Returns ----------", "which sample i belongs cluster_ids = hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx", "hierarchy.fcluster(linkage, self.threshold, criterion='distance') cluster_id_to_feature_idx = {} for idx, cluster_id in", "is 1-phi_correlation \"\"\" X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix", "---------- linkage - linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method) Returns", "each cluster with the highest chi2 score of X[feature] versus", "a feature from each cluster with the highest chi2 score", "feature is selected for retention. \"\"\" # Checks if the", "and otherwise raises a NotFittedError with the given message. sklearn.utils.validation.check_is_fitted(self)", "Perform hierarchical clustering Parameters ---------- linkage - linkage dendogram created", "retention. \"\"\" # Checks if the estimator is fitted by", "underscore) and otherwise raises a NotFittedError with the given message.", "cluster_id in enumerate(cluster_ids): cluster_id_to_feature_idx.setdefault(cluster_id, []).append(idx) return list(cluster_id_to_feature_idx.values()) def fit(self, X,", "RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin", "cluster with the highest chi2 score of X[feature] versus y", "number of features selected (=number of clusters) The attribute self.selected_features_", "corresponding feature is selected for retention. \"\"\" # Checks if", "estimator is fitted by verifying the presence of fitted attributes", "sklearn.feature_extraction.text import CountVectorizer from sklearn.feature_selection import SelectKBest, SelectorMixin from sklearn.pipeline", "SelectKBest features function threshold - clustering threshold for the Hierarchial", "---------- mask - boolean array of shape [# input features]", "column y - feature y column Returns ---------- phi coefficient", "threshold for the Hierarchial clustering random_state - random state for", "of X[feature] versus y \"\"\" def __init__(self, dist_matrix=None, threshold=1): self.dist_matrix", "\"\"\" # Checks if the estimator is fitted by verifying", "input features] An element is True iff its corresponding feature", "dist_matrix=None, threshold=1): self.dist_matrix = dist_matrix self.threshold = threshold def _phi_coef(self,", "hierarchical clustering Parameters ---------- linkage - linkage dendogram created by", "random_state=0): \"\"\" Creates feature selection pipeline Parameters ---------- k -", "feature y column Returns ---------- phi coefficient value \"\"\" confusion_matrix", "np.zeros((self.n_features_, ), dtype=bool) mask[self.selected_features_] = 1 return mask def get_fs_pipeline(k,", "hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a list of lists, each list", "represents the number of features selected (=number of clusters) The", "by hierarchy.linkage(self.distance_matrix, method=method) Returns ---------- a list of lists, each", "\"\"\" Perform hierarchical clustering Parameters ---------- linkage - linkage dendogram", "the indexes of features belonging to the cluster \"\"\" #", "features belonging to the cluster \"\"\" # array of len(X)", "verifying the presence of fitted attributes (ending with a trailing", "pipeline - feature selection pipeline \"\"\" pipeline = Pipeline(steps=[('vectorize', CountVectorizer(lowercase=False,", "Parameters 
---------- linkage - linkage dendogram created by hierarchy.linkage(self.distance_matrix, method=method)", "from scipy.cluster import hierarchy from scipy.stats import chi2_contingency from sklearn.base", "coefficient between features Parameters ---------- x - feature x column", "selected Returns ---------- mask - boolean array of shape [#", "len(X) - array[i] is the cluster number to which sample", "x column y - feature y column Returns ---------- phi", "X_df = pd.DataFrame.sparse.from_spmatrix(X) X_corr_mat = X_df.corr(method=self._phi_coef) feature_corr_dist_matrix = 1 -", "ssd.squareform(feature_corr_dist_matrix) self.dist_matrix = feature_corr_dist_matrix_condensed def _corr_linkage(self, method='average'): linkage = hierarchy.linkage(self.dist_matrix,", "chi2_vals, __ = sklearn.feature_selection.chi2(X, y) chi2_vals = pd.Series(chi2_vals) # fitted", "selected for retention. \"\"\" # Checks if the estimator is", "/ n) return corr def _calc_dist_matrix(self, X): \"\"\" Calculate distance", "= pd.crosstab(x, y) chi2 = chi2_contingency(confusion_matrix)[0] n = confusion_matrix.sum().sum() corr", "the presence of fitted attributes (ending with a trailing #", "with the highest chi2 score of X[feature] versus y \"\"\"", "method=method) return linkage def _hierarchical_clustering(self, linkage): \"\"\" Perform hierarchical clustering", "scipy.spatial.distance as ssd from scipy.cluster import hierarchy from scipy.stats import", "parameter for the SelectKBest features function threshold - clustering threshold", "n) return corr def _calc_dist_matrix(self, X): \"\"\" Calculate distance matrix", "chi2 score of X[feature] versus y \"\"\" def __init__(self, dist_matrix=None,", "selected_features={len(self.selected_features_)}') return self def _get_support_mask(self): \"\"\" Get the boolean mask", "import BaseEstimator from sklearn.ensemble import RandomForestClassifier from sklearn.feature_extraction.text import CountVectorizer" ]
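# Usage sketch (not part of the original module): fitting SelectHierarchicalClustering
# directly with a precomputed condensed distance matrix, which bypasses the
# phi-coefficient computation. The toy data below is an assumption made for
# illustration: features 0/1 are identical (one cluster) and features 2/3 are
# identical (another cluster), so one representative survives per cluster.
import scipy.sparse as sp

X_toy = sp.csr_matrix(np.array([[1, 1, 0, 0],
                                [1, 1, 0, 0],
                                [0, 0, 1, 1],
                                [1, 1, 1, 1],
                                [0, 0, 0, 0]]))
y_toy = np.array([1, 1, 0, 1, 0])

# correlation 1 within a pair, 0 across pairs; distance = 1 - correlation
corr_toy = np.array([[1., 1., 0., 0.],
                     [1., 1., 0., 0.],
                     [0., 0., 1., 1.],
                     [0., 0., 1., 1.]])
dist_toy = ssd.squareform(1 - corr_toy)  # condensed form expected by hierarchy.linkage

selector = SelectHierarchicalClustering(dist_matrix=dist_toy, threshold=0.5)
selector.fit(X_toy, y_toy)
print(selector.selected_features_)       # one feature index per cluster, e.g. [0, 2]
print(selector.transform(X_toy).shape)   # (5, 2)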
[ "result\", \"Download and extract additional data\", \"Demonstrate maximizing screen\", \"Demonstrate", "Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5): browser = webdriver.Safari()", "time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not supported in", "For convenient visual def returnVisibleElement(listOfInputElements): for element in listOfInputElements: if", "= browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input == 4): browser =", "'__main__': while(True): printSelection() choice = input('Enter choice: ') try: choice", "ValueError: print('Invalid input, stop program') break if(choice not in range(0,9)):", "browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input == 4): browser = webdriver.Safari()", "visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not", "too much time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input", "browser.maximize_window() # Required for the input tag visibility browser.get('https://trends.google.com/trends/') try:", "EXAMPLES = [\"Demonstrate unexpected use-case\", \"Demonstrate google search\", \"Demonstrate search", "from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions", "visibility browser.get(URL) # with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\"))", "with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are", "TimeoutException: print(\"Loading took too much time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google", "for item in dataList: text = item.text print(text) elif(input ==", "len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = ' ') if __name__ == '__main__':", "3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data = browser.find_element_by_class_name('content content-breakpoint-gt-md')", "break if(choice not in range(0,9)): print('Invalid input, stop program') break", "\"Demonstrate google search\", \"Demonstrate search on thinkwithgoogle\", \"Demonstrate search on", "if(choice not in range(0,9)): print('Invalid input, stop program') break run(int(choice),", "sep = ' ') if __name__ == '__main__': while(True): printSelection()", "HTML formatting elif(input == 1): driver = webdriver.Safari() driver.get(\"https://www.google.com\") search", "item.text print(text) elif(input == 6): browser = webdriver.Safari() browser.maximize_window() #", "return element def printSelection(): print('Press:') for i in range(0, len(EXAMPLES)):", "browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser =", "TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 
'input-254'))) except TimeoutException: print(\"Loading took", "ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser = webdriver.Safari() browser.maximize_window() #", "search on WebDriverWait\", \"Demonstrate search on thinkwithgoogle search result\", \"Download", "maximizing screen\", \"Demonstrate mouse actions for Chrome\", \"Demonstrate navigation\"] def", "webdriver.Safari() browser.maximize_window() # Required for the button visibility browser.get(URL) #", "button visibility browser.get(URL) # with visibility search time.sleep(2) element_to_hover_over =", "def printSelection(): print('Press:') for i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep", "visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser", "search.send_keys('Google <PASSWORD>') # Google Search \"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN)", "= ' ') if __name__ == '__main__': while(True): printSelection() choice", "browser = webdriver.Chrome() browser.maximize_window() # Required for the button visibility", "if __name__ == '__main__': while(True): printSelection() choice = input('Enter choice:", "search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList =", "\"Demonstrate maximizing screen\", \"Demonstrate mouse actions for Chrome\", \"Demonstrate navigation\"]", "navigation\"] def run(input, URL): if(input == 0): content = requests.get(URL)", "# Print row with HTML formatting elif(input == 1): driver", "with HTML formatting elif(input == 1): driver = webdriver.Safari() driver.get(\"https://www.google.com\")", "actions for Chrome\", \"Demonstrate navigation\"] def run(input, URL): if(input ==", "print('Invalid input, stop program') break if(choice not in range(0,9)): print('Invalid", "\"Download and extract additional data\", \"Demonstrate maximizing screen\", \"Demonstrate mouse", "data\", \"Demonstrate maximizing screen\", \"Demonstrate mouse actions for Chrome\", \"Demonstrate", "'input-254'))) except TimeoutException: print(\"Loading took too much time!\") search =", "# with visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel", "are not supported in safari but will work on other", "= int(choice) except ValueError: print('Invalid input, stop program') break if(choice", "button visibility browser.get(URL) # with visibility search time.sleep(2) search =", "elif(input == 7): browser = webdriver.Chrome() browser.maximize_window() # Required for", "__name__ == '__main__': while(True): printSelection() choice = input('Enter choice: ')", "element.is_displayed(): return element def printSelection(): print('Press:') for i in range(0,", "search.submit() elif(input == 2): browser = webdriver.Safari() browser.get(URL) time.sleep(5) search", "text = item.text print(text) browser.back() print('\\n' * 5) # For", "## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser = webdriver.Safari()", "Required for the button visibility browser.get(URL) # with visibility 
search", "and extract additional data\", \"Demonstrate maximizing screen\", \"Demonstrate mouse actions", "search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data", "# For convenient visual def returnVisibleElement(listOfInputElements): for element in listOfInputElements:", "selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import", "work on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input ==", "= [\"Demonstrate unexpected use-case\", \"Demonstrate google search\", \"Demonstrate search on", "= WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException: print(\"Loading took too much", "from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium", "search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\" search.submit() elif(input == 2): browser", "3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5): browser = webdriver.Safari() browser.maximize_window()", "download-all').click() data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item", "# with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains", "visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2)", "other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser", "Google Search \"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3):", "3): browser = webdriver.Safari() browser.maximize_window() # Required for the input", "data.find_elements_by_tag_name('li') for item in dataList: text = item.text print(text) browser.back()", "selenium import webdriver from selenium.webdriver.common.keys import Keys import time from", "selenium.webdriver.common.keys import Keys import time from selenium.webdriver.common.action_chains import ActionChains from", "== '__main__': while(True): printSelection() choice = input('Enter choice: ') try:", "but will work on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform()", "selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected", "print(text) elif(input == 6): browser = webdriver.Safari() browser.maximize_window() # Required", "for item in dataList: text = item.text print(text) browser.back() print('\\n'", "range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = ' ') if __name__ ==", "BeautifulSoup import requests import re from selenium.webdriver.support.ui import WebDriverWait from", 
"additional data\", \"Demonstrate maximizing screen\", \"Demonstrate mouse actions for Chrome\",", "search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input == 4): browser", "in dataList: text = item.text print(text) browser.back() print('\\n' * 5)", "with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7):", "time from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import TouchActions from", "if element is found within 3 seconds otherwise raise TimeoutException", "time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input == 4):", "5): browser = webdriver.Safari() browser.maximize_window() # Required for the button", "3 seconds otherwise raise TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254')))", "requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row with HTML", "elif(input == 1): driver = webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\")", "extract additional data\", \"Demonstrate maximizing screen\", \"Demonstrate mouse actions for", "webdriver.Safari() browser.get(URL) # with visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput'))", "time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li')", "the button visibility browser.get(URL) # with visibility search time.sleep(2) search", "webdriver.Chrome() browser.maximize_window() # Required for the button visibility browser.get(URL) #", "== 3): browser = webdriver.Safari() browser.maximize_window() # Required for the", "time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser = webdriver.Chrome()", "choice = input('Enter choice: ') try: choice = int(choice) except", "the input tag visibility browser.get('https://trends.google.com/trends/') try: # proceed if element", "from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC", "import webdriver from selenium.webdriver.common.keys import Keys import time from selenium.webdriver.common.action_chains", "google search\", \"Demonstrate search on thinkwithgoogle\", \"Demonstrate search on WebDriverWait\",", "listOfInputElements: if element.is_displayed(): return element def printSelection(): print('Press:') for i", "from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import TimeoutException URL =", "element in listOfInputElements: if element.is_displayed(): return element def printSelection(): print('Press:')", "browser.get(URL) # with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ##", "for Chrome\", \"Demonstrate navigation\"] def run(input, URL): if(input == 0):", "int(choice) except ValueError: print('Invalid input, stop program') break if(choice not", "import Keys import time from selenium.webdriver.common.action_chains import 
ActionChains from selenium.webdriver.common.touch_actions", "time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input", "time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item", "thinkwithgoogle search result\", \"Download and extract additional data\", \"Demonstrate maximizing", "search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5): browser =", "browser = webdriver.Safari() browser.maximize_window() # Required for the button visibility", "* 5) # For convenient visual def returnVisibleElement(listOfInputElements): for element", "requests import re from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import", "bs4 import BeautifulSoup import requests import re from selenium.webdriver.support.ui import", "print(\"Loading took too much time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel", "[\"Demonstrate unexpected use-case\", \"Demonstrate google search\", \"Demonstrate search on thinkwithgoogle\",", "URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected use-case\", \"Demonstrate google", "def returnVisibleElement(listOfInputElements): for element in listOfInputElements: if element.is_displayed(): return element", "search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser =", "search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data", "browser.back() print('\\n' * 5) # For convenient visual def returnVisibleElement(listOfInputElements):", "element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser = webdriver.Chrome() browser.maximize_window()", "in listOfInputElements: if element.is_displayed(): return element def printSelection(): print('Press:') for", "') if __name__ == '__main__': while(True): printSelection() choice = input('Enter", "\"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3): browser =", "proceed if element is found within 3 seconds otherwise raise", "import BeautifulSoup import requests import re from selenium.webdriver.support.ui import WebDriverWait", "from bs4 import BeautifulSoup import requests import re from selenium.webdriver.support.ui", "= webdriver.Safari() browser.maximize_window() # Required for the button visibility browser.get(URL)", "= webdriver.Safari() browser.maximize_window() # Required for the input tag visibility", "dataList: text = item.text print(text) elif(input == 6): browser =", "search.send_keys(Keys.ENTER) elif(input == 5): browser = webdriver.Safari() browser.maximize_window() # Required", "== 5): browser = webdriver.Safari() browser.maximize_window() # Required for the", "import TimeoutException from selenium.webdriver.common.by import By from selenium import webdriver", "= returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5):", "printSelection() choice = input('Enter choice: ') try: choice = int(choice)", "import By 
from selenium import webdriver from selenium.webdriver.common.keys import Keys", "import time from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import TouchActions", "browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google Search", "== 0): content = requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) #", "'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected use-case\", \"Demonstrate google search\", \"Demonstrate", "= webdriver.Safari() browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') #", "4): browser = webdriver.Safari() browser.get(URL) # with visibility search time.sleep(2)", "import TouchActions from selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES", "content = requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row", "input('Enter choice: ') try: choice = int(choice) except ValueError: print('Invalid", "otherwise raise TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException:", "# Required for the button visibility browser.get(URL) # with visibility", "= driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\" search.submit() elif(input ==", "took too much time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3')", "choice = int(choice) except ValueError: print('Invalid input, stop program') break", "program') break if(choice not in range(0,9)): print('Invalid input, stop program')", "supported in safari but will work on other browser ##", "def run(input, URL): if(input == 0): content = requests.get(URL) soup", "browser.get(URL) # with visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google", "will work on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input", "content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item in dataList: text =", "= data.find_elements_by_tag_name('li') for item in dataList: text = item.text print(text)", "elif(input == 8): browser = webdriver.Safari() browser.maximize_window() # Required for", "== 8): browser = webdriver.Safari() browser.maximize_window() # Required for the", "\"Selenium\" search.submit() elif(input == 2): browser = webdriver.Safari() browser.get(URL) time.sleep(5)", "except ValueError: print('Invalid input, stop program') break if(choice not in", "choice: ') try: choice = int(choice) except ValueError: print('Invalid input,", "search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data =", "data.find_elements_by_tag_name('li') for item in dataList: text = item.text print(text) elif(input", "import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions", "time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google 
Search \"Google", "= webdriver.Safari() browser.get(URL) # with visibility search time.sleep(2) search =", "time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2)", "import requests import re from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support", "dataList = data.find_elements_by_tag_name('li') for item in dataList: text = item.text", "2): browser = webdriver.Safari() browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google", "browser.get(URL) # with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input", "# Required for the input tag visibility browser.get('https://trends.google.com/trends/') try: #", "print('Press:') for i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = '", "print(text) browser.back() print('\\n' * 5) # For convenient visual def", "Chrome\", \"Demonstrate navigation\"] def run(input, URL): if(input == 0): content", "== 2): browser = webdriver.Safari() browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1]", "search.send_keys(Keys.RETURN) elif(input == 3): browser = webdriver.Safari() browser.maximize_window() # Required", "the button visibility browser.get(URL) # with visibility search time.sleep(2) element_to_hover_over", "7): browser = webdriver.Chrome() browser.maximize_window() # Required for the button", "Search \"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3): browser", "printSelection(): print('Press:') for i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep =", "\"Demonstrate navigation\"] def run(input, URL): if(input == 0): content =", "== 6): browser = webdriver.Safari() browser.maximize_window() # Required for the", "\"Demonstrate search on thinkwithgoogle search result\", \"Download and extract additional", "except TimeoutException: print(\"Loading took too much time!\") search = browser.find_elements(By.ID,'input-254')[0]", "mouse actions for Chrome\", \"Demonstrate navigation\"] def run(input, URL): if(input", "on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8):", "search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER)", "search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for", "= webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search", "browser.get('https://trends.google.com/trends/') try: # proceed if element is found within 3", "BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row with HTML formatting elif(input ==", "search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google Search \"Google Pixel", "= returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) 
time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click()", "Keys import time from selenium.webdriver.common.action_chains import ActionChains from selenium.webdriver.common.touch_actions import", "from selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate", "with visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3')", "Pixel 3') elif(input == 4): browser = webdriver.Safari() browser.get(URL) #", "Required for the input tag visibility browser.get('https://trends.google.com/trends/') try: # proceed", "item in dataList: text = item.text print(text) elif(input == 6):", "seconds otherwise raise TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except", "if element.is_displayed(): return element def printSelection(): print('Press:') for i in", "for the input tag visibility browser.get('https://trends.google.com/trends/') try: # proceed if", "on thinkwithgoogle search result\", \"Download and extract additional data\", \"Demonstrate", "= 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected use-case\", \"Demonstrate google search\",", "browser = webdriver.Safari() browser.maximize_window() # Required for the input tag", "elif(input == 4): browser = webdriver.Safari() browser.get(URL) # with visibility", "search result\", \"Download and extract additional data\", \"Demonstrate maximizing screen\",", "= webdriver.Chrome() browser.maximize_window() # Required for the button visibility browser.get(URL)", "Print row with HTML formatting elif(input == 1): driver =", "for i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = ' ')", "<PASSWORD>') # Google Search \"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input", "from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException", "Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList", "print('\\n' * 5) # For convenient visual def returnVisibleElement(listOfInputElements): for", "\"Demonstrate search on thinkwithgoogle\", \"Demonstrate search on WebDriverWait\", \"Demonstrate search", "driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\" search.submit()", "item in dataList: text = item.text print(text) browser.back() print('\\n' *", "By from selenium import webdriver from selenium.webdriver.common.keys import Keys import", "print('',i,'to',EXAMPLES[i], sep = ' ') if __name__ == '__main__': while(True):", "3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3): browser = webdriver.Safari() browser.maximize_window()", "search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md')", "= BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row with HTML formatting elif(input", "== 1): driver = webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\")", "element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException: print(\"Loading took too", 
"from selenium import webdriver from selenium.webdriver.common.keys import Keys import time", "import re from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions", "visual def returnVisibleElement(listOfInputElements): for element in listOfInputElements: if element.is_displayed(): return", "selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from selenium import", "# proceed if element is found within 3 seconds otherwise", "within 3 seconds otherwise raise TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID,", "not in range(0,9)): print('Invalid input, stop program') break run(int(choice), URL)", "browser = webdriver.Safari() browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>')", "browser.maximize_window() # Required for the button visibility browser.get(URL) # with", "= returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser = webdriver.Chrome() browser.maximize_window() #", "== 7): browser = webdriver.Chrome() browser.maximize_window() # Required for the", "returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5): browser", "= browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google Search \"Google Pixel 3\"", "print(soup.prettify()) # Print row with HTML formatting elif(input == 1):", "EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By from", "tag visibility browser.get('https://trends.google.com/trends/') try: # proceed if element is found", "= returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not supported in safari but", "## ActionChains are not supported in safari but will work", "import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected use-case\",", "webdriver.Safari() browser.get(URL) time.sleep(5) search = browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google", "webdriver from selenium.webdriver.common.keys import Keys import time from selenium.webdriver.common.action_chains import", "in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = ' ') if __name__", "search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\" search.submit() elif(input", "returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input == 7): browser = webdriver.Chrome() browser.maximize_window() # Required", "# Google Search \"Selenium\" search.submit() elif(input == 2): browser =", "row with HTML formatting elif(input == 1): driver = webdriver.Safari()", "5) # For convenient visual def returnVisibleElement(listOfInputElements): for element in", "1): driver = webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") #", "search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not supported", "elif(input == 2): browser = 
webdriver.Safari() browser.get(URL) time.sleep(5) search =", "try: # proceed if element is found within 3 seconds", "TouchActions from selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES =", "use-case\", \"Demonstrate google search\", \"Demonstrate search on thinkwithgoogle\", \"Demonstrate search", "returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data", "soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row with HTML formatting", "raise TimeoutException element = WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException: print(\"Loading", "time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList", "is found within 3 seconds otherwise raise TimeoutException element =", "Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data = browser.find_element_by_class_name('content", "unexpected use-case\", \"Demonstrate google search\", \"Demonstrate search on thinkwithgoogle\", \"Demonstrate", "visibility browser.get('https://trends.google.com/trends/') try: # proceed if element is found within", "try: choice = int(choice) except ValueError: print('Invalid input, stop program')", "Google Search \"Selenium\" search.submit() elif(input == 2): browser = webdriver.Safari()", "thinkwithgoogle\", \"Demonstrate search on WebDriverWait\", \"Demonstrate search on thinkwithgoogle search", "3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList =", "element def printSelection(): print('Press:') for i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i],", "elif(input == 6): browser = webdriver.Safari() browser.maximize_window() # Required for", "time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3): browser = webdriver.Safari() browser.maximize_window() #", "returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not supported in safari but will", "driver = webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google", "import expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by", "= input('Enter choice: ') try: choice = int(choice) except ValueError:", "screen\", \"Demonstrate mouse actions for Chrome\", \"Demonstrate navigation\"] def run(input,", "WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import", "search on thinkwithgoogle search result\", \"Download and extract additional data\",", "0): content = requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print", "' ') if __name__ == '__main__': while(True): printSelection() choice =", "search on thinkwithgoogle\", \"Demonstrate search on WebDriverWait\", \"Demonstrate search on", "time.sleep(2) browser.find_element_by_class_name('si-button-data download-all').click() data 
= browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li')", "Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input == 3): browser = webdriver.Safari()", "search\", \"Demonstrate search on thinkwithgoogle\", \"Demonstrate search on WebDriverWait\", \"Demonstrate", "element is found within 3 seconds otherwise raise TimeoutException element", "Search \"Selenium\" search.submit() elif(input == 2): browser = webdriver.Safari() browser.get(URL)", "in dataList: text = item.text print(text) elif(input == 6): browser", "elif(input == 5): browser = webdriver.Safari() browser.maximize_window() # Required for", "= item.text print(text) browser.back() print('\\n' * 5) # For convenient", "formatting elif(input == 1): driver = webdriver.Safari() driver.get(\"https://www.google.com\") search =", "WebDriverWait(browser, 3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException: print(\"Loading took too much time!\")", "time.sleep(2) search.send_keys(Keys.ENTER) elif(input == 5): browser = webdriver.Safari() browser.maximize_window() #", "import ActionChains from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import TimeoutException", "TimeoutException URL = 'https://shopping.thinkwithgoogle.com' EXAMPLES = [\"Demonstrate unexpected use-case\", \"Demonstrate", "3).until(EC.presence_of_element_located((By.ID, 'input-254'))) except TimeoutException: print(\"Loading took too much time!\") search", "i in range(0, len(EXAMPLES)): print('',i,'to',EXAMPLES[i], sep = ' ') if", "from selenium.webdriver.common.keys import Keys import time from selenium.webdriver.common.action_chains import ActionChains", "much time!\") search = browser.find_elements(By.ID,'input-254')[0] search.send_keys('Google Pixel 3') elif(input ==", "dataList: text = item.text print(text) browser.back() print('\\n' * 5) #", "input tag visibility browser.get('https://trends.google.com/trends/') try: # proceed if element is", "expected_conditions as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import", "text = item.text print(text) elif(input == 6): browser = webdriver.Safari()", "# Google Search \"Google Pixel 3\" time.sleep(5) search.send_keys(Keys.RETURN) elif(input ==", "on WebDriverWait\", \"Demonstrate search on thinkwithgoogle search result\", \"Download and", "while(True): printSelection() choice = input('Enter choice: ') try: choice =", "webdriver.Safari() browser.maximize_window() # Required for the input tag visibility browser.get('https://trends.google.com/trends/')", "6): browser = webdriver.Safari() browser.maximize_window() # Required for the button", "') try: choice = int(choice) except ValueError: print('Invalid input, stop", "search.send_keys('Google Pixel 3') elif(input == 4): browser = webdriver.Safari() browser.get(URL)", "browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item in dataList: text", "convenient visual def returnVisibleElement(listOfInputElements): for element in listOfInputElements: if element.is_displayed():", "# with visibility search time.sleep(2) element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) elif(input ==", "= browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item in dataList:", 
"run(input, URL): if(input == 0): content = requests.get(URL) soup =", "if(input == 0): content = requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify())", "selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import TimeoutException URL = 'https://shopping.thinkwithgoogle.com'", "as EC from selenium.common.exceptions import TimeoutException from selenium.webdriver.common.by import By", "ActionChains are not supported in safari but will work on", "browser.find_elements_by_id('subjectInput')[1] search.send_keys('Google <PASSWORD>') # Google Search \"Google Pixel 3\" time.sleep(5)", "= returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data =", "search = returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) elif(input ==", "TimeoutException from selenium.webdriver.common.by import By from selenium import webdriver from", "stop program') break if(choice not in range(0,9)): print('Invalid input, stop", "input, stop program') break if(choice not in range(0,9)): print('Invalid input,", "element_to_hover_over = returnVisibleElement(browser.find_elements_by_xpath(\"//i[@class='material-icons'][contains(./text(),'help')]\")) ## ActionChains are not supported in safari", "on thinkwithgoogle\", \"Demonstrate search on WebDriverWait\", \"Demonstrate search on thinkwithgoogle", "selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from", "\"Demonstrate search on WebDriverWait\", \"Demonstrate search on thinkwithgoogle search result\",", "ActionChains from selenium.webdriver.common.touch_actions import TouchActions from selenium.common.exceptions import TimeoutException URL", "= requests.get(URL) soup = BeautifulSoup(content.text,'html.parser') print(soup.prettify()) # Print row with", "WebDriverWait\", \"Demonstrate search on thinkwithgoogle search result\", \"Download and extract", "elif(input == 3): browser = webdriver.Safari() browser.maximize_window() # Required for", "selenium.webdriver.common.by import By from selenium import webdriver from selenium.webdriver.common.keys import", "https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform() TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser = webdriver.Safari() browser.maximize_window()", "in safari but will work on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136", "driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\" search.submit() elif(input == 2):", "selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException from", "item.text print(text) browser.back() print('\\n' * 5) # For convenient visual", "= item.text print(text) elif(input == 6): browser = webdriver.Safari() browser.maximize_window()", "3') elif(input == 4): browser = webdriver.Safari() browser.get(URL) # with", "returnVisibleElement(listOfInputElements): for element in listOfInputElements: if element.is_displayed(): return element def", "from selenium.webdriver.common.by import By from selenium import webdriver from selenium.webdriver.common.keys", "8): browser = webdriver.Safari() 
browser.maximize_window() # Required for the button", "\"Demonstrate mouse actions for Chrome\", \"Demonstrate navigation\"] def run(input, URL):", "TouchActions(browser).long_press(element_to_hover_over).perform() elif(input == 8): browser = webdriver.Safari() browser.maximize_window() # Required", "visibility browser.get(URL) # with visibility search time.sleep(2) search = returnVisibleElement(browser.find_elements_by_id('subjectInput'))", "webdriver.Safari() driver.get(\"https://www.google.com\") search = driver.find_element_by_name(\"q\") search.send_keys(\"Sel<PASSWORD>\") # Google Search \"Selenium\"", "== 4): browser = webdriver.Safari() browser.get(URL) # with visibility search", "URL): if(input == 0): content = requests.get(URL) soup = BeautifulSoup(content.text,'html.parser')", "data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for item in", "returnVisibleElement(browser.find_elements_by_id('subjectInput')) search.send_keys('Google Pixel 3') time.sleep(2) search.send_keys(Keys.ENTER) time.sleep(2) data = browser.find_element_by_class_name('content", "browser.find_element_by_class_name('si-button-data download-all').click() data = browser.find_element_by_class_name('content content-breakpoint-gt-md') dataList = data.find_elements_by_tag_name('li') for", "safari but will work on other browser ## https://github.com/seleniumhq/selenium-google-code-issue-archive/issues/4136 ActionChains(browser).click(element_to_hover_over).perform()", "for element in listOfInputElements: if element.is_displayed(): return element def printSelection():", "re from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as", "not supported in safari but will work on other browser", "found within 3 seconds otherwise raise TimeoutException element = WebDriverWait(browser,", "for the button visibility browser.get(URL) # with visibility search time.sleep(2)", "browser = webdriver.Safari() browser.get(URL) # with visibility search time.sleep(2) search" ]
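The script above uses the legacy find_element_by_* helpers, which were deprecated in Selenium 4 and removed in Selenium 4.3. A minimal sketch of the same Google-search flow in the current By-locator style, assuming Selenium 4.6+ with its built-in driver manager (the Chrome driver choice here is illustrative, not from the original):

from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys

# Selenium 4 style: one generic find_element() plus a By locator
driver = webdriver.Chrome()
driver.get("https://www.google.com")
search = driver.find_element(By.NAME, "q")  # replaces find_element_by_name("q")
search.send_keys("Selenium", Keys.RETURN)   # type the query and submit
driver.quit()

The same By constants work with find_elements() and with WebDriverWait expected conditions, so the explicit wait in branch 3 carries over unchanged.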
[ "line has to be handled in a special way and", "data\"\"\" LOG(\"*** receiveCallback ***\") # read the next set of", "2018, <NAME>, Austria * # * # The Space Python", "more details. * #****************************************************************************** # Unit Tests * #****************************************************************************** import", "has send data\"\"\" LOG(\"*** receiveCallback ***\") # read the next", "the rest should come with the next read self.tcpLineBuffer =", "activate zyclic idle function idleFunction() # ----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000,", "[\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def createServer(): \"\"\"create the", "PARTICULAR PURPOSE. See the MIT License * # for more", "it under under the terms of the MIT License as", "LINEBUFFERLEN = 256 ########### # classes # ########### # =============================================================================", "the console handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP server LOG(\"Open", "terminate the client connection self.disconnectClient(); return if (upperLine == \"Q\")", "by the * # Massachusetts Institute of Technology. * #", "or FITNESS FOR A PARTICULAR PURPOSE. See the MIT License", "* # The Space Python Library is free software; you", "* # The Space Python Library is distributed in the", "LOG(\"line = \" + line) return 0 ############# # functions", "return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" +", "#****************************************************************************** # (C) 2018, <NAME>, Austria * # * #", "stateMask): \"\"\"Callback when a client has send data\"\"\" LOG(\"*** receiveCallback", "telnet if line[-1] == \"\\r\": line = line[:-1] # terminate", "* # Massachusetts Institute of Technology. * # * #", "import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants # ############# LINEBUFFERLEN", "LOG(\"Exit requested\") # send the OK response back to the", "Library is free software; you can redistribute it and/or *", "idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\") ######## # main #", "(incl. 
\"\\n\") pass else: # last line was cutt off", "the client connection if exit has been entered (case insensitive)", "# ######## if __name__ == \"__main__\": # initialise the system", "----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"],", "0: LOG(\"OK\") # send the OK response back to the", "OK response back to the TECO retString = \"OK\\n\"; self.send(retString.encode())", "constants # ############# LINEBUFFERLEN = 256 ########### # classes #", "be # processed directly lastLine = lines[-1] lines = lines[:-1]", "the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the console handler", "<NAME>, Austria * # * # The Space Python Library", "LOG(\"--- idle ---\") ######## # main # ######## if __name__", "of the MIT License as published by the * #", "self.disconnectClient(); return if (upperLine == \"Q\") or (upperLine == \"QUIT\"):", "idleFunction() # ----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\")", "initConfiguration() # initialise the console handler consoleHandler = UTIL.TASK.ConsoleHandler() #", "+= data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer) # handle the input:", "\"\" # --------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask): \"\"\"Callback when a", "upperLine = line.upper() if (upperLine == \"X\") or (upperLine ==", "configuration initConfiguration() # initialise the console handler consoleHandler = UTIL.TASK.ConsoleHandler()", "input: extract the lines from the line buffer lines =", "# --------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask): \"\"\"Callback when a client", "for clients like telnet if line[-1] == \"\\r\": line =", "in a special way and can not be # processed", "= line[:-1] # terminate the client connection if exit has", "MIT License as published by the * # Massachusetts Institute", "receiveCallback(self, socket, stateMask): \"\"\"Callback when a client has send data\"\"\"", "lines = lines[:-1] if lastLine == \"\": # read of", "TCP server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) #", "Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP #############", "The Space Python Library is free software; you can redistribute", "a data line\"\"\" LOG(\"line = \" + line) return 0", "LOG_ERROR(str(pstatus)) # set the Error response back to the client:", "not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle function idleFunction() #", "TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise", "connection self.disconnectClient(); sys.exit(0) # delegate the input pstatus = self.processLine(line);", "line = line[:-1] # terminate the client connection if exit", "client connection if exit has been entered (case insensitive) upperLine", "in lines: # remove a terminating \"\\r\" for clients like", "model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the console handler 
modelTask.registerConsoleHandler(consoleHandler)", "\"\"\"Callback when a client has send data\"\"\" LOG(\"*** receiveCallback ***\")", "server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate", "createServer(): \"\"\"create the TCP server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not", "disconnected return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \"", "terminate the client connection if exit has been entered (case", "if pstatus == 0: LOG(\"OK\") # send the OK response", "the input: extract the lines from the line buffer lines", "functions # ############# # ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the system", "line\"\"\" LOG(\"line = \" + line) return 0 ############# #", "== 0: LOG(\"OK\") # send the OK response back to", "TCP server\") createServer() # start the tasks LOG(\"start modelTask...\") modelTask.start()", "delegate the input pstatus = self.processLine(line); if pstatus == 0:", "distributed in the hope that it will be useful, *", "modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the console handler modelTask.registerConsoleHandler(consoleHandler) #", "the next set of byte from the data socket data", "to be handled in a special way and can not", "== \"QUIT\"): LOG(\"Quit requested\") # send the OK response back", "handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the model modelTask =", "retString = \"Error: execution failed (see log)!\\n\" self.send(retString.encode()) # ---------------------------------------------------------------------------", "classes # ########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\"", "(upperLine == \"X\") or (upperLine == \"EXIT\"): LOG(\"Exit requested\") #", "0 ############# # functions # ############# # ----------------------------------------------------------------------------- def initConfiguration():", "client retString = \"OK\\n\" self.send(retString.encode()) # terminate the client connection", "--------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback when a client has send", "--------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask", "# main # ######## if __name__ == \"__main__\": # initialise", "* # for more details. 
* #****************************************************************************** # Unit Tests", "# terminate the client connection if exit has been entered", "__init__(self, portNr): \"\"\"Initialise attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask,", "lastLine == \"\": # read of the data was complete", "pass else: # last line was cutt off and the", "# handle the input: extract the lines from the line", "only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer = \"\"", "== \"__main__\": # initialise the system configuration initConfiguration() # initialise", "from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS,", "response back to the client retString = \"OK\\n\" self.send(retString.encode()) #", "data line\"\"\" LOG(\"line = \" + line) return 0 #############", "tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer) # handle the", "\"\\r\" for clients like telnet if line[-1] == \"\\r\": line", "# initialise the console handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise", "\"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) #", "pstatus = self.processLine(line); if pstatus == 0: LOG(\"OK\") # send", "server LOG(\"Open the TCP server\") createServer() # start the tasks", "rest should come with the next read self.tcpLineBuffer = lastLine", "last line was cutt off and the rest should come", "= \"OK\\n\" self.send(retString.encode()) # terminate the client connection self.disconnectClient(); sys.exit(0)", "failed (see log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback", "has been entered (case insensitive) upperLine = line.upper() if (upperLine", "sys.exit(-1) # activate zyclic idle function idleFunction() # ----------------------------------------------------------------------------- def", "can not be # processed directly lastLine = lines[-1] lines", "connection if exit has been entered (case insensitive) upperLine =", "a client has send data\"\"\" LOG(\"*** receiveCallback ***\") # read", "it and/or * # modify it under under the terms", "the MIT License as published by the * # Massachusetts", "# ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\",", "of byte from the data socket data = self.recv(LINEBUFFERLEN) if", "# activate zyclic idle function idleFunction() # ----------------------------------------------------------------------------- def idleFunction():", "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the MIT", "consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True)", "* # but WITHOUT ANY WARRANTY; without even the implied", "# read the next set of byte from the data", "the lines from the line buffer lines = tcpLineBuffer.split(\"\\n\") #", "\"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def createServer(): \"\"\"create the TCP", "= UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer = \"\" # ---------------------------------------------------------------------------", "lines[:-1] if lastLine == \"\": # read of the data", "for line in lines: # remove a terminating \"\\r\" for", "\"QUIT\"): LOG(\"Quit requested\") # send the OK response back to", "automatically disconnected return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer:", "idle ---\") ######## # main # ######## if __name__ ==", "input pstatus = self.processLine(line); if pstatus == 0: LOG(\"OK\") #", "was cutt off and the rest should come with the", "of Technology. * # * # The Space Python Library", "WARRANTY; without even the implied warranty of * # MERCHANTABILITY", "warranty of * # MERCHANTABILITY or FITNESS FOR A PARTICULAR", "============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self,", "# for more details. * #****************************************************************************** # Unit Tests *", "free software; you can redistribute it and/or * # modify", "# ----------------------------------------------------------------------------- def createServer(): \"\"\"create the TCP server\"\"\" server =", "# send the OK response back to the client retString", "Library is distributed in the hope that it will be", "# client is automatically disconnected return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer", "self.disconnectClient(); sys.exit(0) # delegate the input pstatus = self.processLine(line); if", "self.send(retString.encode()) # terminate the client connection self.disconnectClient(); return if (upperLine", "under under the terms of the MIT License as published", "log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback when a", "UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise attributes only\"\"\" modelTask", "\"__main__\": # initialise the system configuration initConfiguration() # initialise the", "+ line) return 0 ############# # functions # ############# #", "retString = \"OK\\n\" self.send(retString.encode()) # terminate the client connection self.disconnectClient();", "lines = tcpLineBuffer.split(\"\\n\") # the last line has to be", "a terminating \"\\r\" for clients like telnet if line[-1] ==", "UTIL.TASK.ProcessingTask(isParent=True) # register the console handler modelTask.registerConsoleHandler(consoleHandler) # create the", "# ########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" #", "# initialise the 
system configuration initConfiguration() # initialise the console", "data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer) # handle the input: extract", "256 ########### # classes # ########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer):", "* # modify it under under the terms of the", "\"Q\") or (upperLine == \"QUIT\"): LOG(\"Quit requested\") # send the", "############# LINEBUFFERLEN = 256 ########### # classes # ########### #", "come with the next read self.tcpLineBuffer = lastLine for line", "[\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def createServer(): \"\"\"create the TCP server\"\"\"", "handled in a special way and can not be #", "\"Error: execution failed (see log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self,", "def __init__(self, portNr): \"\"\"Initialise attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self,", "way and can not be # processed directly lastLine =", "and can not be # processed directly lastLine = lines[-1]", "execution failed (see log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self, line):", "without even the implied warranty of * # MERCHANTABILITY or", "client connection self.disconnectClient(); sys.exit(0) # delegate the input pstatus =", "# read of the data was complete (incl. \"\\n\") pass", "\"\"\"Callback when a client has send a data line\"\"\" LOG(\"line", "socket data = self.recv(LINEBUFFERLEN) if data == None: # client", "UTIL.TASK, UTIL.TCP ############# # constants # ############# LINEBUFFERLEN = 256", "receiveCallback ***\") # read the next set of byte from", "lastLine for line in lines: # remove a terminating \"\\r\"", "published by the * # Massachusetts Institute of Technology. *", "has to be handled in a special way and can", "was complete (incl. \"\\n\") pass else: # last line was", "send the OK response back to the client retString =", "ANY WARRANTY; without even the implied warranty of * #", "LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants #", "clients like telnet if line[-1] == \"\\r\": line = line[:-1]", "data == None: # client is automatically disconnected return tcpLineBuffer", "# The Space Python Library is distributed in the hope", "# create the TCP server LOG(\"Open the TCP server\") createServer()", "# register the console handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP", "# but WITHOUT ANY WARRANTY; without even the implied warranty", "import sys from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR", "# Massachusetts Institute of Technology. 
* # * # The", "system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def", "Python Library is distributed in the hope that it will", "UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def createServer(): \"\"\"create", "lines: # remove a terminating \"\\r\" for clients like telnet", "self.send(retString.encode()) # terminate the client connection self.disconnectClient(); sys.exit(0) # delegate", "# remove a terminating \"\\r\" for clients like telnet if", "----------------------------------------------------------------------------- def createServer(): \"\"\"create the TCP server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT))", "# functions # ############# # ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the", "the system configuration initConfiguration() # initialise the console handler consoleHandler", "Institute of Technology. * # * # The Space Python", "byte from the data socket data = self.recv(LINEBUFFERLEN) if data", "should come with the next read self.tcpLineBuffer = lastLine for", "LOG(\"Open the TCP server\") createServer() # start the tasks LOG(\"start", "when a client has send a data line\"\"\" LOG(\"line =", "# (C) 2018, <NAME>, Austria * # * # The", "portNr) self.tcpLineBuffer = \"\" # --------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask):", "LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# #", "portNr): \"\"\"Initialise attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr)", "buffer lines = tcpLineBuffer.split(\"\\n\") # the last line has to", "Austria * # * # The Space Python Library is", "initConfiguration(): \"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]])", "class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self, portNr):", "\"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise attributes", "server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle function idleFunction() # -----------------------------------------------------------------------------", "else: LOG_ERROR(str(pstatus)) # set the Error response back to the", "* #****************************************************************************** import sys from UTIL.SYS import Error, LOG, LOG_INFO,", "# * # The Space Python Library is free software;", "directly lastLine = lines[-1] lines = lines[:-1] if lastLine ==", "client: retString = \"Error: execution failed (see log)!\\n\" self.send(retString.encode()) #", "if __name__ == \"__main__\": # initialise the system configuration initConfiguration()", "* # * # The Space Python Library is free", "the input pstatus = self.processLine(line); if pstatus == 0: LOG(\"OK\")", "will be useful, * # but WITHOUT ANY WARRANTY; without", "response back to 
the client: retString = \"Error: execution failed", "the * # Massachusetts Institute of Technology. * # *", "back to the TECO retString = \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus))", "line was cutt off and the rest should come with", "under the terms of the MIT License as published by", "# last line was cutt off and the rest should", "is automatically disconnected return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\")", "def createServer(): \"\"\"create the TCP server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if", "of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise attributes only\"\"\"", "processLine(self, line): \"\"\"Callback when a client has send a data", "the data was complete (incl. \"\\n\") pass else: # last", "# terminate the client connection self.disconnectClient(); sys.exit(0) # delegate the", "response back to the TECO retString = \"OK\\n\"; self.send(retString.encode()) else:", "+ tcpLineBuffer) # handle the input: extract the lines from", "self.tcpLineBuffer = \"\" # --------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask): \"\"\"Callback", "register the console handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP server", "handle the input: extract the lines from the line buffer", "lines from the line buffer lines = tcpLineBuffer.split(\"\\n\") # the", "as published by the * # Massachusetts Institute of Technology.", "########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # ---------------------------------------------------------------------------", "client connection self.disconnectClient(); return if (upperLine == \"Q\") or (upperLine", "line[-1] == \"\\r\": line = line[:-1] # terminate the client", "############# # constants # ############# LINEBUFFERLEN = 256 ########### #", "\" + tcpLineBuffer) # handle the input: extract the lines", "if (upperLine == \"X\") or (upperLine == \"EXIT\"): LOG(\"Exit requested\")", "the TCP server\") createServer() # start the tasks LOG(\"start modelTask...\")", "send data\"\"\" LOG(\"*** receiveCallback ***\") # read the next set", "complete (incl. \"\\n\") pass else: # last line was cutt", "return 0 ############# # functions # ############# # ----------------------------------------------------------------------------- def", "The Space Python Library is distributed in the hope that", "######## if __name__ == \"__main__\": # initialise the system configuration", "A PARTICULAR PURPOSE. 
See the MIT License * # for", "the client retString = \"OK\\n\" self.send(retString.encode()) # terminate the client", "(upperLine == \"Q\") or (upperLine == \"QUIT\"): LOG(\"Quit requested\") #", "if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle function idleFunction()", "from the line buffer lines = tcpLineBuffer.split(\"\\n\") # the last", "socket, stateMask): \"\"\"Callback when a client has send data\"\"\" LOG(\"***", "def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\") ######## # main", "the OK response back to the client retString = \"OK\\n\"", "or (upperLine == \"EXIT\"): LOG(\"Exit requested\") # send the OK", "tcpLineBuffer) # handle the input: extract the lines from the", "is free software; you can redistribute it and/or * #", "off and the rest should come with the next read", "Error response back to the client: retString = \"Error: execution", "\"1234\"]]) # ----------------------------------------------------------------------------- def createServer(): \"\"\"create the TCP server\"\"\" server", "self.processLine(line); if pstatus == 0: LOG(\"OK\") # send the OK", "insensitive) upperLine = line.upper() if (upperLine == \"X\") or (upperLine", "\"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set the Error response back", "# initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the", "# modify it under under the terms of the MIT", "############# # ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([", "== \"Q\") or (upperLine == \"QUIT\"): LOG(\"Quit requested\") # send", "== None: # client is automatically disconnected return tcpLineBuffer =", "Technology. 
* # * # The Space Python Library is", "the Error response back to the client: retString = \"Error:", "if line[-1] == \"\\r\": line = line[:-1] # terminate the", "attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer =", "= \" + line) return 0 ############# # functions #", "you can redistribute it and/or * # modify it under", "# ############# LINEBUFFERLEN = 256 ########### # classes # ###########", "lines[-1] lines = lines[:-1] if lastLine == \"\": # read", "client has send a data line\"\"\" LOG(\"line = \" +", "modify it under under the terms of the MIT License", "(C) 2018, <NAME>, Austria * # * # The Space", "the line buffer lines = tcpLineBuffer.split(\"\\n\") # the last line", "send a data line\"\"\" LOG(\"line = \" + line) return", "read self.tcpLineBuffer = lastLine for line in lines: # remove", "= 256 ########### # classes # ########### # ============================================================================= class", "# --------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback when a client has", "= \"Error: execution failed (see log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def", "import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP", "a client has send a data line\"\"\" LOG(\"line = \"", "TECO retString = \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set the", "self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback when a client", "\"\"\"create the TCP server\"\"\" server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST):", "client has send data\"\"\" LOG(\"*** receiveCallback ***\") # read the", "Space Python Library is distributed in the hope that it", "hope that it will be useful, * # but WITHOUT", "self.tcpLineBuffer = lastLine for line in lines: # remove a", "LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants # #############", "if exit has been entered (case insensitive) upperLine = line.upper()", "(see log)!\\n\" self.send(retString.encode()) # --------------------------------------------------------------------------- def processLine(self, line): \"\"\"Callback when", "LOG(\"OK\") # send the OK response back to the TECO", "= \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set the Error response", "= \"\" # --------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask): \"\"\"Callback when", "Python Library is free software; you can redistribute it and/or", "to the TECO retString = \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) #", "back to the client retString = \"OK\\n\" self.send(retString.encode()) # terminate", "create the TCP server LOG(\"Open the TCP server\") createServer() #", "\"EXIT\"): LOG(\"Exit requested\") # send the OK response back to", "line[:-1] # terminate the client connection if exit has been", "= tcpLineBuffer.split(\"\\n\") # the last line has to be handled", "WITHOUT ANY WARRANTY; without even the implied warranty of *", "line.upper() if (upperLine == \"X\") or (upperLine == \"EXIT\"): LOG(\"Exit", "the TCP server\"\"\" server = 
TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1)", "pstatus == 0: LOG(\"OK\") # send the OK response back", "the TECO retString = \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set", "--------------------------------------------------------------------------- def receiveCallback(self, socket, stateMask): \"\"\"Callback when a client has", "# the last line has to be handled in a", "idle function idleFunction() # ----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"---", "Massachusetts Institute of Technology. * # * # The Space", "* # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See", "been entered (case insensitive) upperLine = line.upper() if (upperLine ==", "---\") ######## # main # ######## if __name__ == \"__main__\":", "== \"\": # read of the data was complete (incl.", "of * # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.", "modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer = \"\" #", "= self.processLine(line); if pstatus == 0: LOG(\"OK\") # send the", "retString = \"OK\\n\"; self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set the Error", "implied warranty of * # MERCHANTABILITY or FITNESS FOR A", "else: # last line was cutt off and the rest", "FITNESS FOR A PARTICULAR PURPOSE. See the MIT License *", "the implied warranty of * # MERCHANTABILITY or FITNESS FOR", "# classes # ########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of", "(upperLine == \"QUIT\"): LOG(\"Quit requested\") # send the OK response", "requested\") # send the OK response back to the client", "set the Error response back to the client: retString =", "next set of byte from the data socket data =", "Tests * #****************************************************************************** import sys from UTIL.SYS import Error, LOG,", "UTIL.TCP ############# # constants # ############# LINEBUFFERLEN = 256 ###########", "sys from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import", "set of byte from the data socket data = self.recv(LINEBUFFERLEN)", "# processed directly lastLine = lines[-1] lines = lines[:-1] if", "# ############# # ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise the system configuration\"\"\"", "processed directly lastLine = lines[-1] lines = lines[:-1] if lastLine", "UTIL.TASK.ConsoleHandler() # initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register", "terminating \"\\r\" for clients like telnet if line[-1] == \"\\r\":", "# delegate the input pstatus = self.processLine(line); if pstatus ==", "configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # ----------------------------------------------------------------------------- def createServer():", "the terms of the MIT License as published by the", "\"\\r\": line = line[:-1] # terminate the client connection if", "the client connection self.disconnectClient(); return if (upperLine == \"Q\") or", "the data socket data = self.recv(LINEBUFFERLEN) if data == None:", "self.send(retString.encode()) else: LOG_ERROR(str(pstatus)) # set the Error response back to", "the 
OK response back to the TECO retString = \"OK\\n\";", "be handled in a special way and can not be", "when a client has send data\"\"\" LOG(\"*** receiveCallback ***\") #", "with the next read self.tcpLineBuffer = lastLine for line in", "sys.exit(0) # delegate the input pstatus = self.processLine(line); if pstatus", "self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer) # handle", "\"\": # read of the data was complete (incl. \"\\n\")", "tcpLineBuffer.split(\"\\n\") # the last line has to be handled in", "the client connection self.disconnectClient(); sys.exit(0) # delegate the input pstatus", "for more details. * #****************************************************************************** # Unit Tests * #******************************************************************************", "that it will be useful, * # but WITHOUT ANY", "the client: retString = \"Error: execution failed (see log)!\\n\" self.send(retString.encode())", "* # * # The Space Python Library is distributed", "redistribute it and/or * # modify it under under the", "initialise the console handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the", "__name__ == \"__main__\": # initialise the system configuration initConfiguration() #", "modelTask.registerConsoleHandler(consoleHandler) # create the TCP server LOG(\"Open the TCP server\")", "UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\") ######## # main # ########", "== \"EXIT\"): LOG(\"Exit requested\") # send the OK response back", "* #****************************************************************************** # Unit Tests * #****************************************************************************** import sys from", "the TCP server LOG(\"Open the TCP server\") createServer() # start", "the MIT License * # for more details. * #******************************************************************************", "back to the client: retString = \"Error: execution failed (see", "python3 #****************************************************************************** # (C) 2018, <NAME>, Austria * # *", "# constants # ############# LINEBUFFERLEN = 256 ########### # classes", "= self.recv(LINEBUFFERLEN) if data == None: # client is automatically", "LOG(\"tcpLineBuffer: \" + tcpLineBuffer) # handle the input: extract the", "useful, * # but WITHOUT ANY WARRANTY; without even the", "line buffer lines = tcpLineBuffer.split(\"\\n\") # the last line has", "== \"\\r\": line = line[:-1] # terminate the client connection", "PURPOSE. 
See the MIT License * # for more details.", "LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants", "= UTIL.TASK.ConsoleHandler() # initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) #", "console handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP server LOG(\"Open the", "----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\") ######## #", "UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING, LOG_ERROR import UTIL.SYS, UTIL.TASK,", "def processLine(self, line): \"\"\"Callback when a client has send a", "# ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass of UTIL.TCP.SingleClientServer\"\"\" # --------------------------------------------------------------------------- def", "or (upperLine == \"QUIT\"): LOG(\"Quit requested\") # send the OK", "# * # The Space Python Library is distributed in", "TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle function", "UTIL.SYS, UTIL.TASK, UTIL.TCP ############# # constants # ############# LINEBUFFERLEN =", "terms of the MIT License as published by the *", "#!/usr/bin/env python3 #****************************************************************************** # (C) 2018, <NAME>, Austria * #", "############# # functions # ############# # ----------------------------------------------------------------------------- def initConfiguration(): \"\"\"initialise", "return if (upperLine == \"Q\") or (upperLine == \"QUIT\"): LOG(\"Quit", "if lastLine == \"\": # read of the data was", "initialise the model modelTask = UTIL.TASK.ProcessingTask(isParent=True) # register the console", "handler modelTask.registerConsoleHandler(consoleHandler) # create the TCP server LOG(\"Open the TCP", "# send the OK response back to the TECO retString", "\" + line) return 0 ############# # functions # #############", "entered (case insensitive) upperLine = line.upper() if (upperLine == \"X\")", "client is automatically disconnected return tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer +=", "it will be useful, * # but WITHOUT ANY WARRANTY;", "connection self.disconnectClient(); return if (upperLine == \"Q\") or (upperLine ==", "but WITHOUT ANY WARRANTY; without even the implied warranty of", "in the hope that it will be useful, * #", "def receiveCallback(self, socket, stateMask): \"\"\"Callback when a client has send", "last line has to be handled in a special way", "special way and can not be # processed directly lastLine", "UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer = \"\" # --------------------------------------------------------------------------- def receiveCallback(self,", "# terminate the client connection self.disconnectClient(); return if (upperLine ==", "\"X\") or (upperLine == \"EXIT\"): LOG(\"Exit requested\") # send the", "LOG(\"Quit requested\") # send the OK response back to the", "# ----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle ---\") ########", "zyclic idle function idleFunction() # ----------------------------------------------------------------------------- def idleFunction(): 
UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction)", "= UTIL.TASK.ProcessingTask(isParent=True) # register the console handler modelTask.registerConsoleHandler(consoleHandler) # create", "software; you can redistribute it and/or * # modify it", "# Unit Tests * #****************************************************************************** import sys from UTIL.SYS import", "FOR A PARTICULAR PURPOSE. See the MIT License * #", "from the data socket data = self.recv(LINEBUFFERLEN) if data ==", "line) return 0 ############# # functions # ############# # -----------------------------------------------------------------------------", "initialise the system configuration initConfiguration() # initialise the console handler", "idleFunction) LOG(\"--- idle ---\") ######## # main # ######## if", "Space Python Library is free software; you can redistribute it", "(case insensitive) upperLine = line.upper() if (upperLine == \"X\") or", "LOG(\"*** receiveCallback ***\") # read the next set of byte", "data was complete (incl. \"\\n\") pass else: # last line", "TCP server LOG(\"Open the TCP server\") createServer() # start the", "if data == None: # client is automatically disconnected return", "the hope that it will be useful, * # but", "of the data was complete (incl. \"\\n\") pass else: #", "######## # main # ######## if __name__ == \"__main__\": #", "\"\\n\") pass else: # last line was cutt off and", "line in lines: # remove a terminating \"\\r\" for clients", "OK response back to the client retString = \"OK\\n\" self.send(retString.encode())", "console handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the model modelTask", "#****************************************************************************** import sys from UTIL.SYS import Error, LOG, LOG_INFO, LOG_WARNING,", "= self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer) #", "# set the Error response back to the client: retString", "\"\"\"Initialise attributes only\"\"\" modelTask = UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer", "None: # client is automatically disconnected return tcpLineBuffer = self.tcpLineBuffer", "data socket data = self.recv(LINEBUFFERLEN) if data == None: #", "read of the data was complete (incl. \"\\n\") pass else:", "details. 
* #****************************************************************************** # Unit Tests * #****************************************************************************** import sys", "= lastLine for line in lines: # remove a terminating", "terminate the client connection self.disconnectClient(); sys.exit(0) # delegate the input", "line): \"\"\"Callback when a client has send a data line\"\"\"", "License as published by the * # Massachusetts Institute of", "function idleFunction() # ----------------------------------------------------------------------------- def idleFunction(): UTIL.TASK.s_processingTask.createTimeHandler(1000, idleFunction) LOG(\"--- idle", "like telnet if line[-1] == \"\\r\": line = line[:-1] #", "the console handler consoleHandler = UTIL.TASK.ConsoleHandler() # initialise the model", "cutt off and the rest should come with the next", "= lines[:-1] if lastLine == \"\": # read of the", "next read self.tcpLineBuffer = lastLine for line in lines: #", "system configuration initConfiguration() # initialise the console handler consoleHandler =", "\"OK\\n\" self.send(retString.encode()) # terminate the client connection self.disconnectClient(); return if", "to the client: retString = \"Error: execution failed (see log)!\\n\"", "= line.upper() if (upperLine == \"X\") or (upperLine == \"EXIT\"):", "remove a terminating \"\\r\" for clients like telnet if line[-1]", "the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\", \"1234\"]]) # -----------------------------------------------------------------------------", "See the MIT License * # for more details. *", "the next read self.tcpLineBuffer = lastLine for line in lines:", "= \"OK\\n\" self.send(retString.encode()) # terminate the client connection self.disconnectClient(); return", "= lines[-1] lines = lines[:-1] if lastLine == \"\": #", "# The Space Python Library is free software; you can", "to the client retString = \"OK\\n\" self.send(retString.encode()) # terminate the", "(upperLine == \"EXIT\"): LOG(\"Exit requested\") # send the OK response", "read the next set of byte from the data socket", "the last line has to be handled in a special", "main # ######## if __name__ == \"__main__\": # initialise the", "def initConfiguration(): \"\"\"initialise the system configuration\"\"\" UTIL.SYS.s_configuration.setDefaults([ [\"HOST\", \"127.0.0.1\"], [\"SERVER_PORT\",", "#****************************************************************************** # Unit Tests * #****************************************************************************** import sys from UTIL.SYS", "= TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic idle", "be useful, * # but WITHOUT ANY WARRANTY; without even", "tcpLineBuffer = self.tcpLineBuffer tcpLineBuffer += data.decode(\"ascii\") LOG(\"tcpLineBuffer: \" + tcpLineBuffer)", "***\") # read the next set of byte from the", "modelTask, portNr) self.tcpLineBuffer = \"\" # --------------------------------------------------------------------------- def receiveCallback(self, socket,", "send the OK response back to the TECO retString =", "# --------------------------------------------------------------------------- def __init__(self, portNr): \"\"\"Initialise attributes only\"\"\" modelTask =", "self.recv(LINEBUFFERLEN) if data == None: # client is automatically disconnected", "and the rest should come with the next read self.tcpLineBuffer", "== 
\"X\") or (upperLine == \"EXIT\"): LOG(\"Exit requested\") # send", "Unit Tests * #****************************************************************************** import sys from UTIL.SYS import Error,", "\"OK\\n\" self.send(retString.encode()) # terminate the client connection self.disconnectClient(); sys.exit(0) #", "data = self.recv(LINEBUFFERLEN) if data == None: # client is", "exit has been entered (case insensitive) upperLine = line.upper() if", "even the implied warranty of * # MERCHANTABILITY or FITNESS", "has send a data line\"\"\" LOG(\"line = \" + line)", "if (upperLine == \"Q\") or (upperLine == \"QUIT\"): LOG(\"Quit requested\")", "License * # for more details. * #****************************************************************************** # Unit", "can redistribute it and/or * # modify it under under", "is distributed in the hope that it will be useful,", "########### # classes # ########### # ============================================================================= class TCPserver(UTIL.TCP.SingleClientServer): \"\"\"Subclass", "lastLine = lines[-1] lines = lines[:-1] if lastLine == \"\":", "UTIL.TASK.s_processingTask UTIL.TCP.SingleClientServer.__init__(self, modelTask, portNr) self.tcpLineBuffer = \"\" # --------------------------------------------------------------------------- def", "# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the", "MIT License * # for more details. * #****************************************************************************** #", "server = TCPserver(portNr=int(UTIL.SYS.s_configuration.SERVER_PORT)) if not server.openConnectPort(UTIL.SYS.s_configuration.HOST): sys.exit(-1) # activate zyclic", "not be # processed directly lastLine = lines[-1] lines =", "and/or * # modify it under under the terms of", "a special way and can not be # processed directly", "extract the lines from the line buffer lines = tcpLineBuffer.split(\"\\n\")" ]
<reponame>yoavcaspi/pre-commit
from __future__ import unicode_literals

import logging

import cfgv
import pytest

import pre_commit.constants as C
from pre_commit.clientlib import check_type_tag
from pre_commit.clientlib import CONFIG_HOOK_DICT
from pre_commit.clientlib import CONFIG_REPO_DICT
from pre_commit.clientlib import CONFIG_SCHEMA
from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION
from pre_commit.clientlib import MANIFEST_SCHEMA
from pre_commit.clientlib import MigrateShaToRev
from pre_commit.clientlib import validate_config_main
from pre_commit.clientlib import validate_manifest_main
from testing.fixtures import sample_local_config


def is_valid_according_to_schema(obj, obj_schema):
    try:
        cfgv.validate(obj, obj_schema)
        return True
    except cfgv.ValidationError:
        return False


@pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))
def test_check_type_tag_failures(value):
    with pytest.raises(cfgv.ValidationError):
        check_type_tag(value)


@pytest.mark.parametrize(
    ('config_obj', 'expected'), (
        (
            {
                'repos': [{
                    'repo': 'git<EMAIL>:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [{'id': 'pyflakes', 'files': '\\.py$'}],
                }],
            },
            True,
        ),
        (
            {
                'repos': [{
                    'repo': '<EMAIL>:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [
                        {
                            'id': 'pyflakes',
                            'files': '\\.py$',
                            'args': ['foo', 'bar', 'baz'],
                        },
                    ],
                }],
            },
            True,
        ),
        (
            {
                'repos': [{
                    'repo': '<EMAIL>:pre-commit/pre-commit-hooks',
                    'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',
                    'hooks': [
                        {
                            'id': 'pyflakes',
                            'files': '\\.py$',
                            # Exclude pattern must be a string
                            'exclude': 0,
                            'args': ['foo', 'bar', 'baz'],
                        },
                    ],
                }],
            },
            False,
        ),
    ),
)
def test_config_valid(config_obj, expected):
    ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA)
    assert ret is expected


def test_local_hooks_with_rev_fails():
    config_obj = {'repos': [dict(sample_local_config(), rev='foo')]}
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(config_obj, CONFIG_SCHEMA)


def test_config_with_local_hooks_definition_passes():
    config_obj = {'repos': [sample_local_config()]}
    cfgv.validate(config_obj, CONFIG_SCHEMA)


def test_config_schema_does_not_contain_defaults():
    """Due to the way our merging works, if this schema has any defaults they
    will clobber potentially useful values in the backing manifest. #227
    """
    for item in CONFIG_HOOK_DICT.items:
        assert not isinstance(item, cfgv.Optional)


def test_validate_manifest_main_ok():
    assert not validate_manifest_main(('.pre-commit-hooks.yaml',))


def test_validate_config_main_ok():
    assert not validate_config_main(('.pre-commit-config.yaml',))


def test_validate_config_old_list_format_ok(tmpdir):
    f = tmpdir.join('cfg.yaml')
    f.write('- {repo: meta, hooks: [{id: identity}]}')
    assert not validate_config_main((f.strpath,))


def test_validate_warn_on_unknown_keys_at_repo_level(tmpdir, caplog):
    f = tmpdir.join('cfg.yaml')
    f.write(
        '- repo: https://gitlab.com/pycqa/flake8\n'
        '  rev: 3.7.7\n'
        '  hooks:\n'
        '  - id: flake8\n'
        '    args: [--some-args]\n',
    )
    ret_val = validate_config_main((f.strpath,))
    assert not ret_val
    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            'Unexpected config key(s): args',
        ),
    ]


def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):
    f = tmpdir.join('cfg.yaml')
    f.write(
        'repos:\n'
        '- repo: https://gitlab.com/pycqa/flake8\n'
        '  rev: 3.7.7\n'
        '  hooks:\n'
        '  - id: flake8\n'
        'foo:\n'
        '    id: 1.0.0\n',
    )
    ret_val = validate_config_main((f.strpath,))
    assert not ret_val
    assert caplog.record_tuples == [
        (
            'pre_commit',
            logging.WARNING,
            'Unexpected config key(s): foo',
        ),
    ]


@pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main))
def test_mains_not_ok(tmpdir, fn):
    not_yaml = tmpdir.join('f.notyaml')
    not_yaml.write('{')
    not_schema = tmpdir.join('notconfig.yaml')
    not_schema.write('{}')
    assert fn(('does-not-exist',))
    assert fn((not_yaml.strpath,))
    assert fn((not_schema.strpath,))


@pytest.mark.parametrize(
    ('manifest_obj', 'expected'),
    (
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': r'\.py$',
            }],
            True,
        ),
        (
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'language_version': 'python3.4',
                'files': r'\.py$',
            }],
            True,
        ),
        (
            # A regression in 0.13.5: always_run and files are permissible
            [{
                'id': 'a',
                'name': 'b',
                'entry': 'c',
                'language': 'python',
                'files': '',
                'always_run': True,
            }],
            True,
        ),
    ),
)
def test_valid_manifests(manifest_obj, expected):
    ret = is_valid_according_to_schema(manifest_obj, MANIFEST_SCHEMA)
    assert ret is expected


@pytest.mark.parametrize(
    'dct',
    (
        {'repo': 'local'}, {'repo': 'meta'},
        {'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'},
    ),
)
def test_migrate_sha_to_rev_ok(dct):
    MigrateShaToRev().check(dct)


def test_migrate_sha_to_rev_dont_specify_both():
    with pytest.raises(cfgv.ValidationError) as excinfo:
        MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'})
    msg, = excinfo.value.args
    assert msg == 'Cannot specify both sha and rev'


@pytest.mark.parametrize(
    'dct',
    (
        {'repo': 'a'},
        {'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'},
    ),
)
def test_migrate_sha_to_rev_conditional_check_failures(dct):
    with pytest.raises(cfgv.ValidationError):
        MigrateShaToRev().check(dct)


def test_migrate_to_sha_apply_default():
    dct = {'repo': 'a', 'sha': 'b'}
    MigrateShaToRev().apply_default(dct)
    assert dct == {'repo': 'a', 'rev': 'b'}


def test_migrate_to_sha_ok():
    dct = {'repo': 'a', 'rev': 'b'}
    MigrateShaToRev().apply_default(dct)
    assert dct == {'repo': 'a', 'rev': 'b'}


@pytest.mark.parametrize(
    'config_repo',
    (
        # i-dont-exist isn't a valid hook
        {'repo': 'meta', 'hooks': [{'id': 'i-dont-exist'}]},
        # invalid to set a language for a meta hook
        {'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]},
        # name override must be string
        {'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]},
    ),
)
def test_meta_hook_invalid(config_repo):
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(config_repo, CONFIG_REPO_DICT)


@pytest.mark.parametrize(
    'mapping',
    (
        # invalid language key
        {'pony': '1.0'},
        # not a string for version
        {'python': 3},
    ),
)
def test_default_language_version_invalid(mapping):
    with pytest.raises(cfgv.ValidationError):
        cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION)


def test_minimum_pre_commit_version_failing():
    with pytest.raises(cfgv.ValidationError) as excinfo:
        cfg = {'repos': [], 'minimum_pre_commit_version': '999'}
        cfgv.validate(cfg, CONFIG_SCHEMA)
    assert str(excinfo.value) == (
        '\n'
        '==> At Config()\n'
        '==> At key: minimum_pre_commit_version\n'
        '=====> pre-commit version 999 is required but version {} is '
        'installed. Perhaps run `pip install --upgrade pre-commit`.'.format(
            C.VERSION,
        )
    )


def test_minimum_pre_commit_version_passing():
    cfg = {'repos': [], 'minimum_pre_commit_version': '0'}
    cfgv.validate(cfg, CONFIG_SCHEMA)


@pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))
def test_warn_additional(schema):
    allowed_keys = {item.key for item in schema.items if hasattr(item, 'key')}
    warn_additional, = [
        x for x in schema.items if isinstance(x, cfgv.WarnAdditionalKeys)
    ]
    assert allowed_keys == set(warn_additional.keys)
'a',", "'language': 'python', 'files': '', 'always_run': True, }], True, ), ),", "backing manifest. #227 \"\"\" for item in CONFIG_HOOK_DICT.items: assert not", "MANIFEST_SCHEMA) assert ret is expected @pytest.mark.parametrize( 'dct', ( {'repo': 'local'},", "import cfgv import pytest import pre_commit.constants as C from pre_commit.clientlib", "schema.items if hasattr(item, 'key')} warn_additional, = [ x for x", "test_migrate_to_sha_ok(): dct = {'repo': 'a', 'rev': 'b'} MigrateShaToRev().apply_default(dct) assert dct", "import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION from pre_commit.clientlib import MANIFEST_SCHEMA", "'expected'), ( ( [{ 'id': 'a', 'name': 'b', 'entry': 'c',", "), ( [{ 'id': 'a', 'name': 'b', 'entry': 'c', 'language':", "invalid language key {'pony': '1.0'}, # not a string for", "with pytest.raises(cfgv.ValidationError): check_type_tag(value) @pytest.mark.parametrize( ('config_obj', 'expected'), ( ( { 'repos':", "f.write('- {repo: meta, hooks: [{id: identity}]}') assert not validate_config_main((f.strpath,)) def", "the way our merging works, if this schema has any", "pre_commit.clientlib import validate_manifest_main from testing.fixtures import sample_local_config def is_valid_according_to_schema(obj, obj_schema):", "}, ], }], }, False, ), ), ) def test_config_valid(config_obj,", "caplog.record_tuples == [ ( 'pre_commit', logging.WARNING, 'Unexpected config key(s): foo',", "pytest.raises(cfgv.ValidationError): cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_with_local_hooks_definition_passes(): config_obj = {'repos': [sample_local_config()]} cfgv.validate(config_obj,", "[{'id': 'identity', 'language': 'python'}]}, # name override must be string", "MigrateShaToRev from pre_commit.clientlib import validate_config_main from pre_commit.clientlib import validate_manifest_main from", "'rev': 'wat'}, ), ) def test_migrate_sha_to_rev_ok(dct): MigrateShaToRev().check(dct) def test_migrate_sha_to_rev_dont_specify_both(): with", "'b', 'rev': 'c'}) msg, = excinfo.value.args assert msg == 'Cannot", "3.7.7\\n' ' hooks:\\n' ' - id: flake8\\n' 'foo:\\n' ' id:", "specify both sha and rev' @pytest.mark.parametrize( 'dct', ( {'repo': 'a'},", "__future__ import unicode_literals import logging import cfgv import pytest import", "MigrateShaToRev().apply_default(dct) assert dct == {'repo': 'a', 'rev': 'b'} @pytest.mark.parametrize( 'config_repo',", "0, 'args': ['foo', 'bar', 'baz'], }, ], }], }, False,", "'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id': 'pyflakes', 'files': '\\\\.py$'}], }], }, True, ),", "str(excinfo.value) == ( '\\n' '==> At Config()\\n' '==> At key:", "assert fn((not_yaml.strpath,)) assert fn((not_schema.strpath,)) @pytest.mark.parametrize( ('manifest_obj', 'expected'), ( ( [{", "'wat', 'rev': 'wat'}, ), ) def test_migrate_sha_to_rev_ok(dct): MigrateShaToRev().check(dct) def test_migrate_sha_to_rev_dont_specify_both():", "# i-dont-exist isn't a valid hook {'repo': 'meta', 'hooks': [{'id':", "assert str(excinfo.value) == ( '\\n' '==> At Config()\\n' '==> At", "' - id: flake8\\n' 'foo:\\n' ' id: 1.0.0\\n', ) ret_val", "string for version {'python': 3}, ), ) def test_default_language_version_invalid(mapping): with", "'identity', 'language': 'python'}]}, # name override must be string {'repo':", "{ 'id': 'pyflakes', 'files': '\\\\.py$', 'args': ['foo', 'bar', 'baz'], },", "@pytest.mark.parametrize( ('config_obj', 'expected'), ( ( { 'repos': [{ 'repo': 'git<EMAIL>:pre-commit/pre-commit-hooks',", "'999'} cfgv.validate(cfg, 
CONFIG_SCHEMA) assert str(excinfo.value) == ( '\\n' '==> At", "False, ), ), ) def test_config_valid(config_obj, expected): ret = is_valid_according_to_schema(config_obj,", "def test_mains_not_ok(tmpdir, fn): not_yaml = tmpdir.join('f.notyaml') not_yaml.write('{') not_schema = tmpdir.join('notconfig.yaml')", "CONFIG_SCHEMA) @pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT)) def test_warn_additional(schema): allowed_keys = {item.key for", "{} is ' 'installed. Perhaps run `pip install --upgrade pre-commit`.'.format(", "DEFAULT_LANGUAGE_VERSION from pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import MigrateShaToRev from", "'', 'always_run': True, }], True, ), ), ) def test_valid_manifests(manifest_obj,", "tmpdir.join('f.notyaml') not_yaml.write('{') not_schema = tmpdir.join('notconfig.yaml') not_schema.write('{}') assert fn(('does-not-exist',)) assert fn((not_yaml.strpath,))", "'==> At key: minimum_pre_commit_version\\n' '=====> pre-commit version 999 is required", "in CONFIG_HOOK_DICT.items: assert not isinstance(item, cfgv.Optional) def test_validate_manifest_main_ok(): assert not", "@pytest.mark.parametrize( ('manifest_obj', 'expected'), ( ( [{ 'id': 'a', 'name': 'b',", "( { 'repos': [{ 'repo': '<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [", "'c', 'language': 'python', 'files': '', 'always_run': True, }], True, ),", "args', ), ] def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog): f = tmpdir.join('cfg.yaml') f.write(", "[{'id': 'pyflakes', 'files': '\\\\.py$'}], }], }, True, ), ( {", "logging import cfgv import pytest import pre_commit.constants as C from", "def test_config_valid(config_obj, expected): ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert ret is", "this schema has any defaults they will clobber potentially useful", "from pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import MigrateShaToRev from pre_commit.clientlib", "{'repo': 'meta', 'sha': 'a'}, {'repo': 'meta', 'rev': 'a'}, ), )", "assert not isinstance(item, cfgv.Optional) def test_validate_manifest_main_ok(): assert not validate_manifest_main(('.pre-commit-hooks.yaml',)) def", "'0'} cfgv.validate(cfg, CONFIG_SCHEMA) @pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT)) def test_warn_additional(schema): allowed_keys =", "'files': '\\\\.py$', # Exclude pattern must be a string 'exclude':", "[ ( 'pre_commit', logging.WARNING, 'Unexpected config key(s): args', ), ]", "import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA", "'baz'], }, ], }], }, False, ), ), ) def", "obj_schema) return True except cfgv.ValidationError: return False @pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel'))", "False}]}, ), ) def test_meta_hook_invalid(config_repo): with pytest.raises(cfgv.ValidationError): cfgv.validate(config_repo, CONFIG_REPO_DICT) @pytest.mark.parametrize(", "'minimum_pre_commit_version': '999'} cfgv.validate(cfg, CONFIG_SCHEMA) assert str(excinfo.value) == ( '\\n' '==>", "{'repos': [], 'minimum_pre_commit_version': '999'} cfgv.validate(cfg, CONFIG_SCHEMA) assert str(excinfo.value) == (", "is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert ret is expected def test_local_hooks_with_rev_fails(): config_obj =", "a meta hook {'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]},", "CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from 
pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION from", "ret_val assert caplog.record_tuples == [ ( 'pre_commit', logging.WARNING, 'Unexpected config", "from pre_commit.clientlib import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib", "pre_commit.clientlib import MANIFEST_SCHEMA from pre_commit.clientlib import MigrateShaToRev from pre_commit.clientlib import", "{'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat', 'rev': 'wat'}, ), )", "(CONFIG_SCHEMA, CONFIG_REPO_DICT)) def test_warn_additional(schema): allowed_keys = {item.key for item in", "'repos': [{ 'repo': '<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [ { 'id':", "permissible [{ 'id': 'a', 'name': 'b', 'entry': 'c', 'language': 'python',", "'\\n' '==> At Config()\\n' '==> At key: minimum_pre_commit_version\\n' '=====> pre-commit", "if this schema has any defaults they will clobber potentially", "manifest. #227 \"\"\" for item in CONFIG_HOOK_DICT.items: assert not isinstance(item,", "'meta', 'hooks': [{'id': 'i-dont-exist'}]}, # invalid to set a language", "a language for a meta hook {'repo': 'meta', 'hooks': [{'id':", "pytest.raises(cfgv.ValidationError): cfgv.validate(config_repo, CONFIG_REPO_DICT) @pytest.mark.parametrize( 'mapping', ( # invalid language key", "f.write( 'repos:\\n' '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev: 3.7.7\\n' ' hooks:\\n'", "check_type_tag(value) @pytest.mark.parametrize( ('config_obj', 'expected'), ( ( { 'repos': [{ 'repo':", "rev='foo')]} with pytest.raises(cfgv.ValidationError): cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_with_local_hooks_definition_passes(): config_obj = {'repos':", "and rev' @pytest.mark.parametrize( 'dct', ( {'repo': 'a'}, {'repo': 'meta', 'sha':", "for item in CONFIG_HOOK_DICT.items: assert not isinstance(item, cfgv.Optional) def test_validate_manifest_main_ok():", "- id: flake8\\n' 'foo:\\n' ' id: 1.0.0\\n', ) ret_val =", "CONFIG_REPO_DICT) @pytest.mark.parametrize( 'mapping', ( # invalid language key {'pony': '1.0'},", "'a', 'sha': 'b', 'rev': 'c'}) msg, = excinfo.value.args assert msg", "@pytest.mark.parametrize( 'dct', ( {'repo': 'local'}, {'repo': 'meta'}, {'repo': 'wat', 'sha':", "with pytest.raises(cfgv.ValidationError): cfgv.validate(config_repo, CONFIG_REPO_DICT) @pytest.mark.parametrize( 'mapping', ( # invalid language", "= {'repos': [], 'minimum_pre_commit_version': '0'} cfgv.validate(cfg, CONFIG_SCHEMA) @pytest.mark.parametrize('schema', (CONFIG_SCHEMA, CONFIG_REPO_DICT))", "fn(('does-not-exist',)) assert fn((not_yaml.strpath,)) assert fn((not_schema.strpath,)) @pytest.mark.parametrize( ('manifest_obj', 'expected'), ( (", "import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION", "from testing.fixtures import sample_local_config def is_valid_according_to_schema(obj, obj_schema): try: cfgv.validate(obj, obj_schema)", "they will clobber potentially useful values in the backing manifest.", "pattern must be a string 'exclude': 0, 'args': ['foo', 'bar',", "3}, ), ) def test_default_language_version_invalid(mapping): with pytest.raises(cfgv.ValidationError): cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION) def", "'python', 'files': r'\\.py$', }], True, ), ( [{ 'id': 'a',", "except cfgv.ValidationError: return False @pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel')) def test_check_type_tag_failures(value): with", "MigrateShaToRev().check(dct) def 
test_migrate_sha_to_rev_dont_specify_both(): with pytest.raises(cfgv.ValidationError) as excinfo: MigrateShaToRev().check({'repo': 'a', 'sha':", "test_config_with_local_hooks_definition_passes(): config_obj = {'repos': [sample_local_config()]} cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_schema_does_not_contain_defaults(): \"\"\"Due", "'python', 'language_version': 'python3.4', 'files': r'\\.py$', }], True, ), ( #", "validate_config_main((f.strpath,)) assert not ret_val assert caplog.record_tuples == [ ( 'pre_commit',", "# invalid language key {'pony': '1.0'}, # not a string", "merging works, if this schema has any defaults they will", "), ( # A regression in 0.13.5: always_run and files", "'identity', 'name': False}]}, ), ) def test_meta_hook_invalid(config_repo): with pytest.raises(cfgv.ValidationError): cfgv.validate(config_repo,", "{'repo': 'local'}, {'repo': 'meta'}, {'repo': 'wat', 'sha': 'wat'}, {'repo': 'wat',", "validate_manifest_main)) def test_mains_not_ok(tmpdir, fn): not_yaml = tmpdir.join('f.notyaml') not_yaml.write('{') not_schema =", "A regression in 0.13.5: always_run and files are permissible [{", "cfgv.ValidationError: return False @pytest.mark.parametrize('value', ('definitely-not-a-tag', 'fiel')) def test_check_type_tag_failures(value): with pytest.raises(cfgv.ValidationError):", "minimum_pre_commit_version\\n' '=====> pre-commit version 999 is required but version {}", "Exclude pattern must be a string 'exclude': 0, 'args': ['foo',", "f = tmpdir.join('cfg.yaml') f.write('- {repo: meta, hooks: [{id: identity}]}') assert", "test_local_hooks_with_rev_fails(): config_obj = {'repos': [dict(sample_local_config(), rev='foo')]} with pytest.raises(cfgv.ValidationError): cfgv.validate(config_obj, CONFIG_SCHEMA)", "( {'repo': 'local'}, {'repo': 'meta'}, {'repo': 'wat', 'sha': 'wat'}, {'repo':", "== {'repo': 'a', 'rev': 'b'} @pytest.mark.parametrize( 'config_repo', ( # i-dont-exist", "test_validate_manifest_main_ok(): assert not validate_manifest_main(('.pre-commit-hooks.yaml',)) def test_validate_config_main_ok(): assert not validate_config_main(('.pre-commit-config.yaml',)) def", "'args': ['foo', 'bar', 'baz'], }, ], }], }, False, ),", "'language': 'python', 'files': r'\\.py$', }], True, ), ( [{ 'id':", "'a', 'rev': 'b'} MigrateShaToRev().apply_default(dct) assert dct == {'repo': 'a', 'rev':", "hooks:\\n' ' - id: flake8\\n' ' args: [--some-args]\\n', ) ret_val", "from pre_commit.clientlib import validate_manifest_main from testing.fixtures import sample_local_config def is_valid_according_to_schema(obj,", "hook {'repo': 'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]}, # name", "'installed. 
Perhaps run `pip install --upgrade pre-commit`.'.format( C.VERSION, ) )", "'b', 'entry': 'c', 'language': 'python', 'language_version': 'python3.4', 'files': r'\\.py$', }],", "At key: minimum_pre_commit_version\\n' '=====> pre-commit version 999 is required but", "logging.WARNING, 'Unexpected config key(s): args', ), ] def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog):", "True, ), ( [{ 'id': 'a', 'name': 'b', 'entry': 'c',", "= {item.key for item in schema.items if hasattr(item, 'key')} warn_additional,", "( ( { 'repos': [{ 'repo': 'git<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks':", "to set a language for a meta hook {'repo': 'meta',", "'repos:\\n' '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev: 3.7.7\\n' ' hooks:\\n' '", "testing.fixtures import sample_local_config def is_valid_according_to_schema(obj, obj_schema): try: cfgv.validate(obj, obj_schema) return", "as excinfo: MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'}) msg, =", "pytest.raises(cfgv.ValidationError) as excinfo: MigrateShaToRev().check({'repo': 'a', 'sha': 'b', 'rev': 'c'}) msg,", "'rev': 'b'} def test_migrate_to_sha_ok(): dct = {'repo': 'a', 'rev': 'b'}", "MigrateShaToRev().check(dct) def test_migrate_to_sha_apply_default(): dct = {'repo': 'a', 'sha': 'b'} MigrateShaToRev().apply_default(dct)", "def test_migrate_to_sha_ok(): dct = {'repo': 'a', 'rev': 'b'} MigrateShaToRev().apply_default(dct) assert", "), ) def test_config_valid(config_obj, expected): ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert", "pre_commit.clientlib import CONFIG_REPO_DICT from pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import", "'always_run': True, }], True, ), ), ) def test_valid_manifests(manifest_obj, expected):", "def test_config_schema_does_not_contain_defaults(): \"\"\"Due to the way our merging works, if", "pytest import pre_commit.constants as C from pre_commit.clientlib import check_type_tag from", "assert ret is expected def test_local_hooks_with_rev_fails(): config_obj = {'repos': [dict(sample_local_config(),", "' id: 1.0.0\\n', ) ret_val = validate_config_main((f.strpath,)) assert not ret_val", "fn((not_schema.strpath,)) @pytest.mark.parametrize( ('manifest_obj', 'expected'), ( ( [{ 'id': 'a', 'name':", "{ 'id': 'pyflakes', 'files': '\\\\.py$', # Exclude pattern must be", "CONFIG_SCHEMA) def test_config_with_local_hooks_definition_passes(): config_obj = {'repos': [sample_local_config()]} cfgv.validate(config_obj, CONFIG_SCHEMA) def", "[ { 'id': 'pyflakes', 'files': '\\\\.py$', 'args': ['foo', 'bar', 'baz'],", "is required but version {} is ' 'installed. Perhaps run", "= tmpdir.join('cfg.yaml') f.write( '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev: 3.7.7\\n' '", "files are permissible [{ 'id': 'a', 'name': 'b', 'entry': 'c',", "assert msg == 'Cannot specify both sha and rev' @pytest.mark.parametrize(", "] def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog): f = tmpdir.join('cfg.yaml') f.write( 'repos:\\n' '-", "is ' 'installed. 
Perhaps run `pip install --upgrade pre-commit`.'.format( C.VERSION,", "test_config_valid(config_obj, expected): ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert ret is expected", "'c', 'language': 'python', 'language_version': 'python3.4', 'files': r'\\.py$', }], True, ),", "), ] @pytest.mark.parametrize('fn', (validate_config_main, validate_manifest_main)) def test_mains_not_ok(tmpdir, fn): not_yaml =", "'pyflakes', 'files': '\\\\.py$', 'args': ['foo', 'bar', 'baz'], }, ], }],", "Config()\\n' '==> At key: minimum_pre_commit_version\\n' '=====> pre-commit version 999 is", "CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION from pre_commit.clientlib import MANIFEST_SCHEMA from", "version 999 is required but version {} is ' 'installed.", "= is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert ret is expected def test_local_hooks_with_rev_fails(): config_obj", "r'\\.py$', }], True, ), ( # A regression in 0.13.5:", "pre-commit`.'.format( C.VERSION, ) ) def test_minimum_pre_commit_version_passing(): cfg = {'repos': [],", "pre_commit.clientlib import CONFIG_SCHEMA from pre_commit.clientlib import DEFAULT_LANGUAGE_VERSION from pre_commit.clientlib import", "be a string 'exclude': 0, 'args': ['foo', 'bar', 'baz'], },", "rev: 3.7.7\\n' ' hooks:\\n' ' - id: flake8\\n' ' args:", "[{'id': 'i-dont-exist'}]}, # invalid to set a language for a", "CONFIG_SCHEMA) assert str(excinfo.value) == ( '\\n' '==> At Config()\\n' '==>", "'pre_commit', logging.WARNING, 'Unexpected config key(s): args', ), ] def test_validate_warn_on_unknown_keys_at_top_level(tmpdir,", "], }], }, True, ), ( { 'repos': [{ 'repo':", "tmpdir.join('cfg.yaml') f.write( '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev: 3.7.7\\n' ' hooks:\\n'", "config_obj = {'repos': [sample_local_config()]} cfgv.validate(config_obj, CONFIG_SCHEMA) def test_config_schema_does_not_contain_defaults(): \"\"\"Due to", "'=====> pre-commit version 999 is required but version {} is", "{'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]}, ), ) def", "'bar', 'baz'], }, ], }], }, False, ), ), )", "assert not validate_manifest_main(('.pre-commit-hooks.yaml',)) def test_validate_config_main_ok(): assert not validate_config_main(('.pre-commit-config.yaml',)) def test_validate_config_old_list_format_ok(tmpdir):", "'wat'}, ), ) def test_migrate_sha_to_rev_ok(dct): MigrateShaToRev().check(dct) def test_migrate_sha_to_rev_dont_specify_both(): with pytest.raises(cfgv.ValidationError)", "'sha': 'a'}, {'repo': 'meta', 'rev': 'a'}, ), ) def test_migrate_sha_to_rev_conditional_check_failures(dct):", "with pytest.raises(cfgv.ValidationError): cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION) def test_minimum_pre_commit_version_failing(): with pytest.raises(cfgv.ValidationError) as excinfo:", "'baz'], }, ], }], }, True, ), ( { 'repos':", "'language': 'python'}]}, # name override must be string {'repo': 'meta',", "pytest.raises(cfgv.ValidationError): check_type_tag(value) @pytest.mark.parametrize( ('config_obj', 'expected'), ( ( { 'repos': [{", "'b'} MigrateShaToRev().apply_default(dct) assert dct == {'repo': 'a', 'rev': 'b'} @pytest.mark.parametrize(", "x in schema.items if isinstance(x, cfgv.WarnAdditionalKeys) ] assert allowed_keys ==", "not ret_val assert caplog.record_tuples == [ ( 'pre_commit', logging.WARNING, 'Unexpected", "for version {'python': 3}, ), ) def test_default_language_version_invalid(mapping): with pytest.raises(cfgv.ValidationError):", "id: flake8\\n' 'foo:\\n' ' id: 1.0.0\\n', ) ret_val = 
validate_config_main((f.strpath,))", "'<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [ { 'id': 'pyflakes', 'files': '\\\\.py$',", "], }], }, False, ), ), ) def test_config_valid(config_obj, expected):", "is expected def test_local_hooks_with_rev_fails(): config_obj = {'repos': [dict(sample_local_config(), rev='foo')]} with", "flake8\\n' 'foo:\\n' ' id: 1.0.0\\n', ) ret_val = validate_config_main((f.strpath,)) assert", "C.VERSION, ) ) def test_minimum_pre_commit_version_passing(): cfg = {'repos': [], 'minimum_pre_commit_version':", "('definitely-not-a-tag', 'fiel')) def test_check_type_tag_failures(value): with pytest.raises(cfgv.ValidationError): check_type_tag(value) @pytest.mark.parametrize( ('config_obj', 'expected'),", "'i-dont-exist'}]}, # invalid to set a language for a meta", "assert caplog.record_tuples == [ ( 'pre_commit', logging.WARNING, 'Unexpected config key(s):", ") ) def test_minimum_pre_commit_version_passing(): cfg = {'repos': [], 'minimum_pre_commit_version': '0'}", "a string for version {'python': 3}, ), ) def test_default_language_version_invalid(mapping):", "), ) def test_default_language_version_invalid(mapping): with pytest.raises(cfgv.ValidationError): cfgv.validate(mapping, DEFAULT_LANGUAGE_VERSION) def test_minimum_pre_commit_version_failing():", "test_validate_warn_on_unknown_keys_at_repo_level(tmpdir, caplog): f = tmpdir.join('cfg.yaml') f.write( '- repo: https://gitlab.com/pycqa/flake8\\n' '", "import pre_commit.constants as C from pre_commit.clientlib import check_type_tag from pre_commit.clientlib", "'exclude': 0, 'args': ['foo', 'bar', 'baz'], }, ], }], },", "config_obj = {'repos': [dict(sample_local_config(), rev='foo')]} with pytest.raises(cfgv.ValidationError): cfgv.validate(config_obj, CONFIG_SCHEMA) def", "f = tmpdir.join('cfg.yaml') f.write( 'repos:\\n' '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev:", "def test_validate_warn_on_unknown_keys_at_top_level(tmpdir, caplog): f = tmpdir.join('cfg.yaml') f.write( 'repos:\\n' '- repo:", "test_minimum_pre_commit_version_passing(): cfg = {'repos': [], 'minimum_pre_commit_version': '0'} cfgv.validate(cfg, CONFIG_SCHEMA) @pytest.mark.parametrize('schema',", "assert dct == {'repo': 'a', 'rev': 'b'} @pytest.mark.parametrize( 'config_repo', (", "ret = is_valid_according_to_schema(config_obj, CONFIG_SCHEMA) assert ret is expected def test_local_hooks_with_rev_fails():", "assert dct == {'repo': 'a', 'rev': 'b'} def test_migrate_to_sha_ok(): dct", "# not a string for version {'python': 3}, ), )", "( { 'repos': [{ 'repo': 'git<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37', 'hooks': [{'id':", "the backing manifest. 
#227 \"\"\" for item in CONFIG_HOOK_DICT.items: assert", "invalid to set a language for a meta hook {'repo':", "tmpdir.join('cfg.yaml') f.write( 'repos:\\n' '- repo: https://gitlab.com/pycqa/flake8\\n' ' rev: 3.7.7\\n' '", "CONFIG_SCHEMA) assert ret is expected def test_local_hooks_with_rev_fails(): config_obj = {'repos':", "'meta', 'hooks': [{'id': 'identity', 'language': 'python'}]}, # name override must", "msg, = excinfo.value.args assert msg == 'Cannot specify both sha", "'expected'), ( ( { 'repos': [{ 'repo': 'git<EMAIL>:pre-commit/pre-commit-hooks', 'rev': 'cd74dc150c142c3be70b24eaf0b02cae9d235f37',", "}], True, ), ( [{ 'id': 'a', 'name': 'b', 'entry':", "import check_type_tag from pre_commit.clientlib import CONFIG_HOOK_DICT from pre_commit.clientlib import CONFIG_REPO_DICT", "'language_version': 'python3.4', 'files': r'\\.py$', }], True, ), ( # A", "in schema.items if hasattr(item, 'key')} warn_additional, = [ x for", "be string {'repo': 'meta', 'hooks': [{'id': 'identity', 'name': False}]}, )," ]
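# The tests above drive cfgv.validate() directly against the exported
# schemas. As a quick orientation, here is a minimal sketch of the same
# calls outside pytest; it assumes a pre-commit 1.x install where
# pre_commit.clientlib exposes these names, and it reuses the flake8
# repo/rev fixture from the warn-on-unknown-keys tests:
import cfgv
from pre_commit.clientlib import CONFIG_SCHEMA

config = {
    'repos': [{
        'repo': 'https://gitlab.com/pycqa/flake8',
        'rev': '3.7.7',
        'hooks': [{'id': 'flake8'}],
    }],
}
cfgv.validate(config, CONFIG_SCHEMA)  # passes silently; raises on bad input

# Giving a repo both sha and rev trips MigrateShaToRev().check() through the
# schema, mirroring test_migrate_sha_to_rev_dont_specify_both above:
try:
    cfgv.validate({'repos': [dict(config['repos'][0], sha='x')]}, CONFIG_SCHEMA)
except cfgv.ValidationError as exc:
    print(exc)  # error text includes 'Cannot specify both sha and rev'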
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
#  IkaLog
#  ======
#  Copyright (C) 2015 <NAME>
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#

import gettext

import wx
import wx.lib.scrolledpanel

import ikalog.outputs
from ikalog.ui.events import *
from ikalog.ui.panel import *
from ikalog.ui import VideoCapture
from ikalog.utils import *

_ = Localization.gettext_translation('IkaUI', fallback=True).gettext


class OptionsGUI(object):

    def __init__(self, ikalog_gui):
        self.ikalog_gui = ikalog_gui
        self.frame = None
        self._init_frame()

    def _init_frame(self):
        if self.frame:
            return

        self.frame = wx.Frame(
            self.ikalog_gui.frame, wx.ID_ANY, _("Options"), size=(640, 500))

        self.notebook = wx.Notebook(self.frame, wx.ID_ANY)

        # Apply button
        button_apply = wx.Button(self.frame, wx.ID_ANY, _(u'Apply'))

        # Use a bold font.
        apply_font = button_apply.GetFont()
        apply_font.SetWeight(wx.FONTWEIGHT_BOLD)
        button_apply.SetFont(apply_font)

        button_cancel = wx.Button(self.frame, wx.ID_ANY, _(u'Cancel'))
        button_load_default = wx.Button(
            self.frame, wx.ID_ANY, _(u'Load default'))

        buttons_sizer = wx.BoxSizer(wx.HORIZONTAL)
        buttons_sizer.Add(button_apply)
        buttons_sizer.Add(button_cancel)
        buttons_sizer.Add(button_load_default)

        top_sizer = wx.BoxSizer(wx.VERTICAL)
        top_sizer.Add(self.notebook)
        top_sizer.Add(buttons_sizer)
        self.frame.SetSizer(top_sizer)

        # Set event handlers for buttons.
        button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply)
        button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel)
        button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default)

        outputs = [self.ikalog_gui.capture] + self.ikalog_gui.outputs
        self._init_outputs(outputs)

        # self.capture.panel is a part of self.frame. This Bind propagates
        # capture's source change to the preview.
        self.ikalog_gui.capture.panel.Bind(
            EVT_INPUT_INITIALIZED, self.ikalog_gui.on_input_initialized)

        # Refresh UI of each plugin.
        self.ikalog_gui.engine.call_plugins(
            'on_config_load_from_context', debug=True)

    def show(self):
        if not self.frame:
            self._init_frame()
        self.frame.Show()
        self.frame.Raise()

    def on_button_apply(self, event):
        self.ikalog_gui.on_options_apply(event)

    def on_button_cancel(self, event):
        self.ikalog_gui.on_options_cancel(event)

    def on_button_load_default(self, event):
        self.ikalog_gui.on_options_load_default(event)

    def _init_outputs(self, outputs):
        output_dict = {}
        for output in outputs:
            output_dict[output.__class__] = output

        # Keys for outputs in the main page.
        keys = [
            ikalog.ui.VideoCapture,
            ikalog.outputs.OBS,
            ikalog.outputs.StatInk,
            ikalog.outputs.Twitter
        ]

        # Keys for outputs combined into the misc tab.
        misc_keys = [
            ikalog.outputs.CSV,
            ikalog.outputs.JSON,
            ikalog.outputs.Screenshot,
            ikalog.outputs.Boyomi,
            ikalog.outputs.Slack,
            ikalog.outputs.WebSocketServer,
        ]
        for key in output_dict.keys():
            if key in misc_keys:
                continue
            if key not in keys:
                keys.append(key)

        # Main tabs
        index = 0
        for key in keys:
            output = output_dict.get(key)
            if not output:
                continue
            output.on_option_tab_create(self.notebook)
            self.notebook.InsertPage(index, output.panel, output.panel_name)
            index += 1

        # Misc tab
        self.misc_panel = wx.lib.scrolledpanel.ScrolledPanel(
            self.notebook, wx.ID_ANY, size=(640, 360))
        self.misc_panel_sizer = wx.BoxSizer(wx.VERTICAL)

        default_font = self.misc_panel.GetFont()
        title_font = wx.Font(default_font.GetPointSize(),
                             wx.FONTFAMILY_DEFAULT,
                             wx.FONTSTYLE_NORMAL,
                             wx.FONTWEIGHT_BOLD)

        for key in misc_keys:
            output = output_dict.get(key)
            if not output:
                continue
            output.on_option_tab_create(self.misc_panel)

            title = wx.StaticText(self.misc_panel, wx.ID_ANY, output.panel_name)
            title.SetFont(title_font)
            self.misc_panel_sizer.Add(title)
            self.misc_panel_sizer.Add(
                output.panel, flag=wx.EXPAND | wx.ALL, border=10)
            self.misc_panel_sizer.Add((-1, 25))

        self.misc_panel.SetSizer(self.misc_panel_sizer)
        self.misc_panel.SetupScrolling()
        self.notebook.InsertPage(index, self.misc_panel, _('Misc.'))
keys = [ ikalog.ui.VideoCapture, ikalog.outputs.OBS, ikalog.outputs.StatInk, ikalog.outputs.Twitter ] #", "on_button_cancel(self, event): self.ikalog_gui.on_options_cancel(event) def on_button_load_default(self, event): self.ikalog_gui.on_options_load_default(event) def _init_outputs(self, outputs):", "output: continue output.on_option_tab_create(self.misc_panel) title = wx.StaticText(self.misc_panel, wx.ID_ANY, output.panel_name) title.SetFont(title_font) self.misc_panel_sizer.Add(title)", "keys: keys.append(key) # Main tabs index = 0 for key", "self.on_button_cancel) button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default) outputs = [self.ikalog_gui.capture] + self.ikalog_gui.outputs self._init_outputs(outputs) #", "the main page. keys = [ ikalog.ui.VideoCapture, ikalog.outputs.OBS, ikalog.outputs.StatInk, ikalog.outputs.Twitter", "wx.Font(default_font.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD) for key in misc_keys: output =", "output.panel, output.panel_name) index += 1 # Misc tab self.misc_panel =", "Bind propagates # capture's source change to the preview. self.ikalog_gui.capture.panel.Bind(", "<reponame>fetus-hina/IkaLog #!/usr/bin/env python3 # -*- coding: utf-8 -*- # #", "\"License\"); # you may not use this file except in", "# Use a bold font. apply_font = button_apply.GetFont() apply_font.SetWeight(wx.FONTWEIGHT_BOLD) button_apply.SetFont(apply_font)", "outputs combined into the misc tab. misc_keys = [ ikalog.outputs.CSV,", "self.ikalog_gui.frame, wx.ID_ANY, _(\"Options\"), size=(640, 500)) self.notebook = wx.Notebook(self.frame, wx.ID_ANY) #", "distributed on an \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "button button_apply = wx.Button(self.frame, wx.ID_ANY, _(u'Apply')) # Use a bold", "# distributed under the License is distributed on an \"AS", "# Unless required by applicable law or agreed to in", "EVT_INPUT_INITIALIZED, self.ikalog_gui.on_input_initialized) # Refresh UI of each plugin. self.ikalog_gui.engine.call_plugins( 'on_config_load_from_context',", "ikalog_gui self.frame = None self._init_frame() def _init_frame(self): if self.frame: return", "buttons. button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply) button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel) button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default) outputs = [self.ikalog_gui.capture]", "wx.ID_ANY, output.panel_name) title.SetFont(title_font) self.misc_panel_sizer.Add(title) self.misc_panel_sizer.Add( output.panel, flag=wx.EXPAND | wx.ALL, border=10)", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY", "self.on_button_apply) button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel) button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default) outputs = [self.ikalog_gui.capture] + self.ikalog_gui.outputs", "= wx.Font(default_font.GetPointSize(), wx.FONTFAMILY_DEFAULT, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD) for key in misc_keys: output", "wx import wx.lib.scrolledpanel import ikalog.outputs from ikalog.ui.events import * from", "# import gettext import wx import wx.lib.scrolledpanel import ikalog.outputs from", "handlers for buttons. 
button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply) button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel) button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default) outputs", "You may obtain a copy of the License at #", "_(u'Load default')) buttons_sizer = wx.BoxSizer(wx.HORIZONTAL) buttons_sizer.Add(button_apply) buttons_sizer.Add(button_cancel) buttons_sizer.Add(button_load_default) top_sizer =", "if self.frame: return self.frame = wx.Frame( self.ikalog_gui.frame, wx.ID_ANY, _(\"Options\"), size=(640,", "event handlers for buttons. button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply) button_cancel.Bind(wx.EVT_BUTTON, self.on_button_cancel) button_load_default.Bind(wx.EVT_BUTTON, self.on_button_load_default)", "not output: continue output.on_option_tab_create(self.notebook) self.notebook.InsertPage(index, output.panel, output.panel_name) index += 1", "wx.Notebook(self.frame, wx.ID_ANY) # Apply button button_apply = wx.Button(self.frame, wx.ID_ANY, _(u'Apply'))", "the Apache License, Version 2.0 (the \"License\"); # you may", "permissions and # limitations under the License. # import gettext", "for outputs in the main page. keys = [ ikalog.ui.VideoCapture,", "'on_config_load_from_context', debug=True) def show(self): if not self.frame: self._init_frame() self.frame.Show() self.frame.Raise()", "a bold font. apply_font = button_apply.GetFont() apply_font.SetWeight(wx.FONTWEIGHT_BOLD) button_apply.SetFont(apply_font) button_cancel =", "self.frame.SetSizer(top_sizer) # Set event handlers for buttons. button_apply.Bind(wx.EVT_BUTTON, self.on_button_apply) button_cancel.Bind(wx.EVT_BUTTON,", "-*- # # IkaLog # ====== # Copyright (C) 2015" ]
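The interesting part of _init_outputs is that it keys plugins by their class object, so a fixed tab order can be declared up front while plugins the GUI does not know about still get a tab. A standalone sketch of that dispatch pattern follows; the class and instance names here are illustrative stand-ins, not IkaLog's real plugins:

    class StatInk: pass          # stand-ins for plugin classes
    class CSV: pass
    class Slack: pass
    class CustomPlugin: pass     # a plugin the GUI does not know about

    outputs = [CSV(), StatInk(), Slack(), CustomPlugin()]
    output_dict = {o.__class__: o for o in outputs}

    keys = [StatInk]             # fixed order for the main tabs
    misc_keys = [CSV, Slack]     # grouped into a single misc tab

    # Anything that is neither a misc plugin nor already listed gets
    # appended to the main tabs, preserving discovery order.
    for key in output_dict.keys():
        if key in misc_keys:
            continue
        if key not in keys:
            keys.append(key)

    print([k.__name__ for k in keys])       # ['StatInk', 'CustomPlugin']
    print([k.__name__ for k in misc_keys])  # ['CSV', 'Slack']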
[ "f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK',", "= f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB',", "packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI' ], extras_require={'httpx': ['http2']}", "f: long_description = f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>',", "description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests',", "import setup, find_packages with open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description =", "setuptools import setup, find_packages with open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description", "from setuptools import setup, find_packages with open(\"README.md\", 'r',encoding=\"utf-8\") as f:", "long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift',", "author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI'", "author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI' ],", "open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description = f.read() setup( name='LineBot', version='0.1.0',", "find_packages with open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description = f.read() setup(", "setup, find_packages with open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description = f.read()", "'r',encoding=\"utf-8\") as f: long_description = f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB',", "version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0',", "as f: long_description = f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description,", "'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI' ], extras_require={'httpx': ['http2']} )", "long_description = f.read() setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>',", "url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[ 'httpx==0.19.0', 'requests', 'thrift', 'CyberTKAPI' ], extras_require={'httpx':", "setup( name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, 
author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']),", "with open(\"README.md\", 'r',encoding=\"utf-8\") as f: long_description = f.read() setup( name='LineBot',", "name='LineBot', version='0.1.0', description='Simple-LINELIB', long_description=long_description, author='<NAME>', author_email='<EMAIL>', url='https://github.com/CyberTKR/Simple-LINELIB', packages=find_packages(include=['CyberTK', 'CyberTK.*']), install_requires=[" ]
[ "(get_info,)) return get_info @decorater_log def get_tunnel_if_info(self): ''' Tunnel interface information", "% (get_info,)) return get_info @decorater_log def get_pppoe_info(self): ''' PPPoE information", "ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe = %s\" %", "= self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route in routes: tmp_item =", "ec_message=None, db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message", "tunnel_if = %s\" % (get_info,)) return get_info @decorater_log def get_pppoe_info(self):", "@decorater_log def __init__(self, device_name=None, ec_message=None, db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB,", "''' import GlobalModule from EmCommonLog import decorater_log from DriverSetParameterECDB import", "tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" % (get_info,))", "tunnel in tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"]", "is acquired. ''' get_info = {} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get", "utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and Telephone Corporation", "ip_address:str, subnet_mask:str, gateway_address:str }] } ''' get_info = {} tmp_list", "''' Static route information is acquired. acquired dict: { static_route:[{", "(get_info,)) return get_info @decorater_log def get_pppoe_info(self): ''' PPPoE information is", "acquired dict: { static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }] } '''", "}] } ''' get_info = {} tmp_list = [] routes", "tmp_item = {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"]", "route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" %", "= {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] =", "= tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel", "class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for Cgwsh driver configuration '''", "''' Parameter module for Cgwsh driver configuration ''' import GlobalModule", "}] } ''' get_info = {} tmp_list = [] tunnel_uni", "self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route in routes: tmp_item = {}", "= tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if =", "route information is acquired. acquired dict: { static_route:[{ ip_address:str, subnet_mask:str,", "@decorater_log def get_pppoe_info(self): ''' PPPoE information is acquired. acquired dict:", "ppp_infos: tmp_item = {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\")", "GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" % (get_info,)) return get_info @decorater_log def", "def get_pppoe_info(self): ''' PPPoE information is acquired. acquired dict: {", "information is acquired. 
acquired dict: { pppoe:[{ username:str, password:str, tenant:str,", "get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" % (get_info,)) return", "@decorater_log def get_tunnel_if_info(self): ''' Tunnel interface information is acquired. acquired", "Cgwsh driver configuration ''' @decorater_log def __init__(self, device_name=None, ec_message=None, db_info=None):", "information is acquired. acquired dict: { tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str,", "CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh driver configuration ''' import", "decorater_log from DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class", "-*- coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and", "gateway_address:str }] } ''' get_info = {} tmp_list = []", "{} tmp_list = [] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for", "= {} tmp_list = [] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ())", "ppp_info in ppp_infos: tmp_item = {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"]", "\"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" %", "information is acquired. ''' get_info = {} get_info[\"device_name\"] = self.ec_message.get(\"name\")", "uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"]", "tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" % (get_info,)) return get_info @decorater_log", "\"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id", "in routes: tmp_item = {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] =", "''' PPPoE information is acquired. 
acquired dict: { pppoe:[{ username:str,", "get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe = %s\" % (get_info,)) return", "{ tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] } '''", "def __init__(self, device_name=None, ec_message=None, db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name,", "tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item)", "tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe =", "= %s\" % (get_info,)) return get_info @decorater_log def get_static_route_info(self): '''", "% (get_info,)) return get_info @decorater_log def get_static_route_info(self): ''' Static route", "from DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for", "tunnel_source:str, }] } ''' get_info = {} tmp_list = []", "return get_info @decorater_log def get_tunnel_if_info(self): ''' Tunnel interface information is", "= {} tmp_list = [] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ())", "tmp_item = {} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"]", "tmp_list = [] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info", "= route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get", "uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo: tmp_item = {}", "<PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] =", "= ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] =", "(get_info,)) return get_info @decorater_log def get_static_route_info(self): ''' Static route information", "@decorater_log def get_static_route_info(self): ''' Static route information is acquired. acquired", "vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id", "Nippon Telegraph and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter", "get_pppoe_info(self): ''' PPPoE information is acquired. acquired dict: { pppoe:[{", "dict: { static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }] } ''' get_info", "@decorater_log def get_management_info(self): ''' Management information is acquired. 
''' get_info", "tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if", "tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe = %s\" % (get_info,))", "tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\")", "''' get_info = {} tmp_list = [] tunnel_uni = self.ec_message.get(\"serviceInfo\",", "for route in routes: tmp_item = {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\")", "pppoe:[{ username:str, password:str, tenant:str, pp_no:str }] } ''' get_info =", "= self.ec_message[\"device\"] @decorater_log def get_service_info(self): ''' Service information is acquired.", "super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log def get_service_info(self):", "''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message = self.ec_message[\"device\"]", "pass @decorater_log def get_management_info(self): ''' Management information is acquired. '''", "= tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" % (get_info,)) return get_info", "import GlobalModule from EmCommonLog import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB", "Parameter module for Cgwsh driver configuration ''' import GlobalModule from", "from EmCommonLog import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB):", "coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph and Telephone", "python # -*- coding: utf-8 -*- # Copyright(c) 2019 Nippon", "route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] =", "@decorater_log def get_service_info(self): ''' Service information is acquired. ''' pass", "{} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\")", "= vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] =", "= uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list", "get_service_info(self): ''' Service information is acquired. ''' pass @decorater_log def", "= [] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route in", "static_route = %s\" % (get_info,)) return get_info @decorater_log def get_tunnel_if_info(self):", "{} tmp_list = [] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo", "tmp_list = [] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo =", "2019 Nippon Telegraph and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py '''", "interface information is acquired. acquired dict: { tunnel_if:[{ vrf_name:str, if_name:str,", "''' @decorater_log def __init__(self, device_name=None, ec_message=None, db_info=None): ''' Constructor '''", "get_management_info(self): ''' Management information is acquired. 
''' get_info = {}", "DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for Cgwsh", "get_info @decorater_log def get_pppoe_info(self): ''' PPPoE information is acquired. acquired", "tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\")", "self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" % (get_info,)) return get_info @decorater_log", "} ''' get_info = {} tmp_list = [] routes =", "tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] =", "{} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\")", "{ static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }] } ''' get_info =", "[] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\",", "PPPoE information is acquired. acquired dict: { pppoe:[{ username:str, password:str,", "''' pass @decorater_log def get_management_info(self): ''' Management information is acquired.", "username:str, password:str, tenant:str, pp_no:str }] } ''' get_info = {}", "Cgwsh driver configuration ''' import GlobalModule from EmCommonLog import decorater_log", "for Cgwsh driver configuration ''' @decorater_log def __init__(self, device_name=None, ec_message=None,", "self.ec_message = self.ec_message[\"device\"] @decorater_log def get_service_info(self): ''' Service information is", "tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"]", "GlobalModule.EM_LOGGER.debug(\"get static_route = %s\" % (get_info,)) return get_info @decorater_log def", "{}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id =", "vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] } ''' get_info =", "and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for", "is acquired. acquired dict: { tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str,", "password:str, tenant:str, pp_no:str }] } ''' get_info = {} tmp_list", "db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message =", "def get_tunnel_if_info(self): ''' Tunnel interface information is acquired. acquired dict:", "self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info in ppp_infos: tmp_item = {}", "routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route in routes: tmp_item", "''' Management information is acquired. ''' get_info = {} get_info[\"device_name\"]", "information is acquired. acquired dict: { static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str", "self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\")", "-*- # Copyright(c) 2019 Nippon Telegraph and Telephone Corporation #", "acquired. 
''' get_info = {} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info", "static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }] } ''' get_info = {}", "()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\")", "{ pppoe:[{ username:str, password:str, tenant:str, pp_no:str }] } ''' get_info", "= tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"]", "tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item)", "EmCommonLog import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): '''", "acquired. ''' pass @decorater_log def get_management_info(self): ''' Management information is", "= self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name =", "tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route =", "''' get_info = {} tmp_list = [] ppp_infos = self.ec_message.get(\"serviceInfo\",", "= self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" % (get_info,)) return get_info", "if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] } ''' get_info = {}", "= tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo: tmp_item", "class for Cgwsh driver configuration ''' @decorater_log def __init__(self, device_name=None,", "tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get(", "''' Parameter class for Cgwsh driver configuration ''' @decorater_log def", "information is acquired. ''' pass @decorater_log def get_management_info(self): ''' Management", "}] } ''' get_info = {} tmp_list = [] ppp_infos", "acquired dict: { pppoe:[{ username:str, password:str, tenant:str, pp_no:str }] }", "''' Service information is acquired. 
''' pass @decorater_log def get_management_info(self):", "# -*- coding: utf-8 -*- # Copyright(c) 2019 Nippon Telegraph", "{} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name", "tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\"", "= {} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" %", "''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log def", "= [] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get(", "uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get", "<filename>lib/SeparateDriver/CgwshDeviceDriverSetParameterECDB.py #!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright(c)", "()) for route in routes: tmp_item = {} tmp_item[\"ip_address\"] =", "for Cgwsh driver configuration ''' import GlobalModule from EmCommonLog import", "is acquired. acquired dict: { pppoe:[{ username:str, password:str, tenant:str, pp_no:str", "DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for Cgwsh driver configuration", "def get_service_info(self): ''' Service information is acquired. ''' pass @decorater_log", "()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\")", "in ppp_infos: tmp_item = {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] =", "= {} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] =", "get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" % (get_info,)) return", "Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log", "dict: { tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] }", "Copyright(c) 2019 Nippon Telegraph and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py", "tmp_item = {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"]", "vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for", "= <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"]", "%s\" % (get_info,)) return get_info @decorater_log def get_static_route_info(self): ''' Static", "{} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" % (get_info,))", "acquired. 
acquired dict: { pppoe:[{ username:str, password:str, tenant:str, pp_no:str }]", "tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name = tunnel_uni.get(\"vrfName\") uni_if_name", "management_info = %s\" % (get_info,)) return get_info @decorater_log def get_static_route_info(self):", "tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo: tmp_item =", "= %s\" % (get_info,)) return get_info @decorater_log def get_pppoe_info(self): '''", "route in routes: tmp_item = {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"]", "[] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info in ppp_infos:", "driver configuration ''' @decorater_log def __init__(self, device_name=None, ec_message=None, db_info=None): '''", "Telegraph and Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module", "def get_static_route_info(self): ''' Static route information is acquired. acquired dict:", "tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"]", "CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for Cgwsh driver configuration ''' @decorater_log", "self.ec_message[\"device\"] @decorater_log def get_service_info(self): ''' Service information is acquired. '''", "is acquired. ''' pass @decorater_log def get_management_info(self): ''' Management information", "db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log def get_service_info(self): ''' Service information", "pp_no:str }] } ''' get_info = {} tmp_list = []", "= self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info in ppp_infos: tmp_item =", "= tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe = %s\" % (get_info,)) return get_info", "device_name=None, ec_message=None, db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message, db_info)", "GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" % (get_info,)) return get_info @decorater_log def", "Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh driver", "routes: tmp_item = {} tmp_item[\"ip_address\"] = route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\")", "{}).get(\"staticRoute\", ()) for route in routes: tmp_item = {} tmp_item[\"ip_address\"]", "Service information is acquired. ''' pass @decorater_log def get_management_info(self): '''", "is acquired. 
acquired dict: { static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }]", "= self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ())", "tenant:str, pp_no:str }] } ''' get_info = {} tmp_list =", "{}).get(\"pppInfo\", ()) for ppp_info in ppp_infos: tmp_item = {} tmp_item[\"username\"]", "get_info @decorater_log def get_tunnel_if_info(self): ''' Tunnel interface information is acquired.", "tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list", "driver configuration ''' import GlobalModule from EmCommonLog import decorater_log from", "} ''' get_info = {} tmp_list = [] ppp_infos =", "get_info = {} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\"", "''' get_info = {} tmp_list = [] routes = self.ec_message.get(\"serviceInfo\",", "Telephone Corporation # Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh", "tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] } ''' get_info", "return get_info @decorater_log def get_static_route_info(self): ''' Static route information is", "} ''' get_info = {} tmp_list = [] tunnel_uni =", "tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\",", "import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter class for Cgwsh driver", "get_tunnel_if_info(self): ''' Tunnel interface information is acquired. acquired dict: {", "()) for ppp_info in ppp_infos: tmp_item = {} tmp_item[\"username\"] =", "''' Tunnel interface information is acquired. 
acquired dict: { tunnel_if:[{", "% (get_info,)) return get_info @decorater_log def get_tunnel_if_info(self): ''' Tunnel interface", "= ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get", "# Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh driver configuration", "uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo:", "= [] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info in", "get_info = {} tmp_list = [] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\",", "#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright(c) 2019", "configuration ''' @decorater_log def __init__(self, device_name=None, ec_message=None, db_info=None): ''' Constructor", "ec_message, db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log def get_service_info(self): ''' Service", "[] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route in routes:", "tunnel_uni.get(\"vrfName\") uni_if_name = tunnel_uni.get(\"ifName\") uni_vlan_id = tunnel_uni.get(\"vlanId\") for tunnel in", "ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe", "acquired dict: { tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }]", "self.ec_message.get(\"serviceInfo\", {}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name", "tmp_list = [] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ()) for route", "import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB class CgwshDeviceDriverSetParameterECDB(DriverSetParameterECDB): ''' Parameter", "%s\" % (get_info,)) return get_info @decorater_log def get_tunnel_if_info(self): ''' Tunnel", "Static route information is acquired. acquired dict: { static_route:[{ ip_address:str,", "get_info = {} tmp_list = [] tunnel_uni = self.ec_message.get(\"serviceInfo\", {}).get(\"uni\",", "tunnel_uni.get(\"vlanId\") for tunnel in tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"] =", "get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info = %s\" % (get_info,)) return", "# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation # Filename:", "get_info = {} tmp_list = [] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\",", "for ppp_info in ppp_infos: tmp_item = {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\")", "GlobalModule from EmCommonLog import decorater_log from DriverSetParameterECDB import DriverSetParameterECDB class", "= {} tmp_list = [] routes = self.ec_message.get(\"serviceInfo\", {}).get(\"staticRoute\", ())", "Parameter class for Cgwsh driver configuration ''' @decorater_log def __init__(self,", "= route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route = %s\"", "acquired. acquired dict: { static_route:[{ ip_address:str, subnet_mask:str, gateway_address:str }] }", "= ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get pppoe = %s\"", "Tunnel interface information is acquired. 
acquired dict: { tunnel_if:[{ vrf_name:str,", "uni_vlan_id:str, tunnel_source:str, }] } ''' get_info = {} tmp_list =", "{}).get(\"uni\", ()) tunnel_officeInfo = self.ec_message.get( \"serviceInfo\", {}).get(\"officeInfo\", ()) vrf_name =", "return get_info @decorater_log def get_pppoe_info(self): ''' PPPoE information is acquired.", "ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for ppp_info in ppp_infos: tmp_item", "= route.get(\"ipAddress\") tmp_item[\"subnet_mask\"] = route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"]", "ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\")", "subnet_mask:str, gateway_address:str }] } ''' get_info = {} tmp_list =", "= uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] = tunnel.get( \"tunnelSrcIpAddress\") tmp_list.append(tmp_item)", "__init__(self, device_name=None, ec_message=None, db_info=None): ''' Constructor ''' super(CgwshDeviceDriverSetParameterECDB, self).__init__(device_name, ec_message,", "self).__init__(device_name, ec_message, db_info) self.ec_message = self.ec_message[\"device\"] @decorater_log def get_service_info(self): '''", "{} tmp_list = [] ppp_infos = self.ec_message.get(\"serviceInfo\", {}).get(\"pppInfo\", ()) for", "tmp_item[\"tenant\"] = ppp_info.get(\"corporationId\") tmp_item[\"pp_no\"] = ppp_info.get(\"ppId\") tmp_list.append(tmp_item) get_info[\"pppoe\"] = tmp_list", "%s\" % (get_info,)) return get_info @decorater_log def get_pppoe_info(self): ''' PPPoE", "uni_if_name:str, uni_vlan_id:str, tunnel_source:str, }] } ''' get_info = {} tmp_list", "module for Cgwsh driver configuration ''' import GlobalModule from EmCommonLog", "route.get(\"subnetMask\") tmp_item[\"gateway_address\"] = route.get(\"gatewayIpAddress\") tmp_list.append(tmp_item) get_info[\"static_route\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get static_route", "tmp_list.append(tmp_item) get_info[\"tunnel_if\"] = tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" % (get_info,))", "dict: { pppoe:[{ username:str, password:str, tenant:str, pp_no:str }] } '''", "''' get_info = {} get_info[\"device_name\"] = self.ec_message.get(\"name\") GlobalModule.EM_LOGGER.debug(\"get management_info =", "= %s\" % (get_info,)) return get_info @decorater_log def get_tunnel_if_info(self): '''", "Management information is acquired. ''' get_info = {} get_info[\"device_name\"] =", "= {} tmp_item[\"username\"] = ppp_info.get(\"connectionId\") tmp_item[\"password\"] = <PASSWORD>.get(\"connectionPassword\") tmp_item[\"tenant\"] =", "= tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" % (get_info,)) return get_info", "= tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"] = uni_vlan_id tmp_item[\"tunnel_source\"] =", "for tunnel in tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"] = vrf_name", "in tunnel_officeInfo: tmp_item = {} tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] =", "tmp_item[\"vrf_name\"] = vrf_name tmp_item[\"if_name\"] = tunnel.get(\"tunnelIfName\") tmp_item[\"uni_if_name\"] = uni_if_name tmp_item[\"uni_vlan_id\"]", "tmp_list GlobalModule.EM_LOGGER.debug(\"get tunnel_if = %s\" % (get_info,)) return get_info @decorater_log", "get_static_route_info(self): ''' Static route information is acquired. 
acquired dict: {", "Filename: CgwshDeviceDriverSetParameterECDB.py ''' Parameter module for Cgwsh driver configuration '''", "acquired. acquired dict: { tunnel_if:[{ vrf_name:str, if_name:str, uni_if_name:str, uni_vlan_id:str, tunnel_source:str,", "def get_management_info(self): ''' Management information is acquired. ''' get_info =", "get_info @decorater_log def get_static_route_info(self): ''' Static route information is acquired.", "configuration ''' import GlobalModule from EmCommonLog import decorater_log from DriverSetParameterECDB" ]
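For reference, a minimal sketch of the EC-message shape these getters read. The field names come straight from the .get() calls above; the device name and sample values, and the idea of exercising the mapping outside the class, are illustrative only:

    # Hypothetical EC message; key names mirror the getters above.
    ec_message = {
        "device": {
            "name": "cgwsh-01",
            "serviceInfo": {
                "staticRoute": [
                    {"ipAddress": "192.0.2.0",
                     "subnetMask": "255.255.255.0",
                     "gatewayIpAddress": "192.0.2.1"},
                ],
                "pppInfo": [
                    {"connectionId": "user01",
                     "connectionPassword": "example-password",
                     "corporationId": "tenant-a",
                     "ppId": "1"},
                ],
            },
        },
    }

    # Mirrors get_static_route_info() on the sample above.
    device = ec_message["device"]
    routes = device.get("serviceInfo", {}).get("staticRoute", ())
    static_route = [{"ip_address": r.get("ipAddress"),
                     "subnet_mask": r.get("subnetMask"),
                     "gateway_address": r.get("gatewayIpAddress")}
                    for r in routes]
    print(static_route)
    # [{'ip_address': '192.0.2.0', 'subnet_mask': '255.255.255.0',
    #   'gateway_address': '192.0.2.1'}]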
[ "and/or directories. @param directory directory to convert @return JSON representation", "if '-' not in tag: continue str_version, str_dash, str_timestamp =", "matches(expectedElement,actual[i]): matchedPosition = i break if matchedPosition != -1: #", "package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred while verifying", "the base image :returns: None \"\"\" # Form the path", "image :param str image_name: the name of the image :param", "a given directory into JSON form. The parent object is", "list composed of more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x", "registry path @param usr the functional username for the docker", "cert location # is figured out and we specify it", "list of the actual files and directories in the bundle", "@param zipDirectory where the bundle ZIP is located @param nameOfBundle", "the JSON representation of the actual structure of the uploaded", "be the one with the highest version number (vX) and", "+ \"/\" if expected['children'] == \"_any\": isMatched = isMatched &", "\"FROM \" + docker_url + image_namespace + \"/\" + image_name", "environment variable to 300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print(\"The timeout", "registry_path + '/tags/list' request = get(tag_list_url, usr, pwd) tag_list =", "actualBundleStructure, \"\") if not bundleIsGood: addToErrorMessages(\"The uploaded bundle does not", "pwd the password for the docker registry functional user @return", "\"\"\" Function to convert objects in a given directory into", "def unzip(zip_file, to_dir): \"\"\" Generic unzip function for extracting zip", "directory can be anything then do no further checking else:", "\"\"\" Function to check if files/directories match. They must have", "-1 for i in range(0, len(actual)): if matches(expectedElement,actual[i]): matchedPosition =", "a list then find out if they match anywhere, if", "=============================================== def openJSONfile(jsonFile): \"\"\" Function to open a JSON file", "is defined to be the one with the highest version", "d = {'name': os.path.basename(directory)} # the parent object is dict", "# verify structure of bundle ------------------------------------------------------------------------ # check package stucture", "} # TEMP: Remove the suppressed verification once the docker", "of the actual files and directories in the bundle @param", "url the url of the REST call @param usr the", "+ '/tags/list' request = get(tag_list_url, usr, pwd) tag_list = json.loads(request.text)", "as json_data_file: data = json.load(json_data_file) except: addToErrorMessages(\"The specified JSON file", "it in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr, pwd), headers=headers, verify=False) def", "directory is a list composed of more files/directories d['children'] =", "be unzipped. 
Could not proceed with deployment.\") # out put", "once the docker cert location # is figured out and", "# do further checking else: # a matching file or", "and return false if needed isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath)", "path @param usr the functional username for the docker registry", "then find out if they match anywhere, if so get", "good or bad @param filePath the zip file to be", "the missing file or directory \"\"\" addToErrorMessages(\"A \"+ expected['type'] +\"", "os.path.join(zipDirectory, \"temp\") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) #", "adds error messages to the global array. @param expected the", "@param registry_path docker registry path @param usr the functional username", "match then verify their children too isMatched = isMatched &", "FROM command string to the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the", "return def zipFileIsGood(filePath): \"\"\" Function to test if a ZIP", "\"\"\" HTTP/HTTPS GET requests using external Python module requests @param", "= \"file\" return d def verifyBundleStructure(expected, actual, currentPath): \"\"\" Function", "return False if type(expected) is list: for k in range(0,len(expected)):", "verify structure of bundle ------------------------------------------------------------------------ # check package stucture expectedPackageStructure", "return ret def verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function to verify if", "subdirectory) return def zipFileIsGood(filePath): \"\"\" Function to test if a", "ret = False return ret def verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function", "expectedElement the expected element. May be defined by regular expression", "bundleIsGood = True bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try:", "isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched def addToErrorMessages(errorMessage): \"\"\"", "the uploaded bundle follows IBM defined structure. False otherwise. 
\"\"\"", "structure...') bundleIsGood = True bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip):", "pwd): \"\"\" HTTP/HTTPS GET requests using external Python module requests", "= json.loads(request.text) for tag in tag_list['tags']: if '-' not in", "unzip function for extracting zip files @param zip_file the zip", "further checking else: # a matching file or directory was", "the docker cert location # is figured out and we", "\"\"\" ret = False if re.fullmatch(expectedElement['name'], actualElement['name']) is not None", "can be anything then do no further checking else: isMatched", "currentPath): \"\"\" Function to verify if an uploaded bundle follows", "= open(dockerfile_path, \"w+\") # Format the FROM command dockerfile_from_cmd =", "get(tag_list_url, usr, pwd) tag_list = json.loads(request.text) for tag in tag_list['tags']:", "bad @param filePath the zip file to be tested @return", "return false if needed isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath) return", "badFile is not None: ret = False else: ret =", "GET requests using external Python module requests @param url the", "expected['type'] +\" is missing from the path: \\\"\" + currentPath", "The latest, by definition, is defined to be the one", "Build Scripts A collection of common functions shared by each", "file \"\"\" try: with open(jsonFile) as json_data_file: data = json.load(json_data_file)", "actualElement['type']: ret = True return ret def verifyActualContainsExpectedElement(actual, expectedElement, currentPath,", "tag_list_url = registry_path + '/tags/list' request = get(tag_list_url, usr, pwd)", "the path for the Dockerfile based on the parent of", "to add error messages to the global list of errorMessages", "for the docker registry @param pwd the password for the", "datacenter this script is being run on :param str dockerfile_parent_dir:", "registry_path docker registry path @param usr the functional username for", "and may contain files and/or directories. @param directory directory to", "caller script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the Dockerfile", "of the directory can be anything then do no further", "element. Helper method to verifyBundleStructure. @param actual list of the", "location # is figured out and we specify it in", "JSON file \"\"\" try: with open(jsonFile) as json_data_file: data =", "unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0])", "missing from the path: \\\"\" + currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails'])", "\"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print(\"The timeout set for docker client:", "the Dockerfile based on the parent of the caller script", "contains an expected element. Helper method to verifyBundleStructure. 
@param actual", "zipFileIsGood(bundleZip): try: # copy bundle into new working directory -----------------------------------------------------------", "len(actual)): if matches(expectedElement,actual[i]): matchedPosition = i break if matchedPosition !=", "don't match then log the error msg and return false", "check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo)", "latest_version < str_version: latest_version = str_version latest_timestamp = dt_timestamp latest_tag", "\"\") if not bundleIsGood: addToErrorMessages(\"The uploaded bundle does not meet", "image_namespace + \"/\" + image_name + \":\" + image_tag_latest #", "the expected element @param currentPath the current path we are", "str_version: continue elif latest_version < str_version: latest_version = str_version latest_timestamp", "of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create", "Function to verify if an uploaded bundle is: 1) a", "user @return the latest image tag \"\"\" tag_list_url = registry_path", "not proceed with deployment.\") # clean up unzipped stuff and", "docker registry path @param usr the functional username for the", "not meet predefined structure. Could not proceed with deployment.\") #", "the actual structure of the uploaded bundle @param currentPath the", "if the contents of the directory can be anything then", "JSON file bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not bundleIsGood:", "password for the docker registry functional user @return the latest", "for extracting zip files @param zip_file the zip file to", "datetime import re import operator import unicodedata # global list", "bundle ZIP file \"\"\" print ('Validating bundle structure...') bundleIsGood =", "of Common Functions used by Build Scripts A collection of", "recursively @param zipFileName the zip file to be extracted @param", "def verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function to verify if an uploaded", "+ image_namespace + \"/\" + image_name + \":\" + image_tag_latest", "= \"directory\" # directory may have children # the children", "or directory was not found if expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected,", "module requests @param url the url of the REST call", "stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert", "\"\"\" Function to test if a ZIP file is good", "recursive calls) @return True if the list of actual objects", "jsonFile) raise return data def directoryToJSON(directory): \"\"\" Function to convert", "def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest): \"\"\" Creates a dockerfile", "zip file to \"\"\" # update if zipFileName.endswith(\".zip\"): #check if", "return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function to unzip a ZIP", "dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the Dockerfile dockerfile =", "expectedElement the expected element to find in the bundle @param", "false if needed isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched", "a valid zip file 2) follows IBM defined structure @param", "with deployment.\") # out put report value , join all", "URL associated with the 
def addToErrorMessages(errorMessage):
    """
    Function to add error messages to the global list of errorMessages.
    @param errorMessage the error message to add
    """
    print(errorMessage)
    global errorMessages  # fixed typo: the original declared the misspelled name 'errorMessges'
    errorMessages.extend([errorMessage])
    return
def unzipRecursively(zipFileName, directoryToUnzipTo):
    """
    Function to unzip a ZIP file recursively.
    @param zipFileName the zip file to be extracted
    @param directoryToUnzipTo the destination directory to extract the zip file to
    """
    if zipFileName.endswith(".zip"):  # only recurse into .zip archives
        unzip(zipFileName, directoryToUnzipTo)
        os.remove(zipFileName)  # drop the archive once it has been extracted
        for x in os.listdir(directoryToUnzipTo):
            subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0])
            subfile = os.path.join(directoryToUnzipTo, x)
            unzipRecursively(subfile, subdirectory)
    return
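# Editor's sketch: unzipRecursively() extracts an archive, deletes the archive file itself, and
# then expands any nested .zip entries in place. The paths below are hypothetical.
def _exampleUnzipNestedBundle():
    unzipRecursively("/tmp/bundles/my-app.zip", "/tmp/bundles/work/my-app")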
\"\"\" ret = True", "the bundle @param currentPath the path currently being checked (used", "not valid: \" + jsonFile) raise return data def directoryToJSON(directory):", "2) follows IBM defined structure @param zipDirectory where the bundle", "tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue", "ZIP file is good or bad @param filePath the zip", "based on the parent of the caller script dockerfile_path =", "global errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function to", "open a JSON file @param jsonFile path to the JSON", "= [] \"\"\" Collection of Common Functions used by Build", "actual structure of the uploaded bundle @param currentPath the path", "Dockerfile based on the parent of the caller script dockerfile_path", "element @param currentPath the current path we are on that", "import operator import unicodedata # global list of error messages", "to the global list of errorMessages @param errorMessage the error", "too isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath) else", "in tag_list['tags']: if '-' not in tag: continue str_version, str_dash,", "except NameError: latest_version = str_version latest_timestamp = dt_timestamp latest_tag =", "if latest_version > str_version: continue elif latest_version < str_version: latest_version", "found if expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected, currentPath) return False if", "= isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched) return isMatched def", "unzip the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify", "for the docker registry functional user @return a JSON response", "the uploaded bundle @param currentPath the path currently being checked", "list of error messages to keep track of all error", "# Format the FROM command dockerfile_from_cmd = \"FROM \" +", "\"\"\" addToErrorMessages(\"A \"+ expected['type'] +\" is missing from the path:", "they don't match then log the error msg and return", "\"\"\" Creates a dockerfile using the correct docker registry URL", "version of an image based on its tags: vX-YYYYMMDD-HHmm. The", "verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function to verify if an uploaded bundle", "to build paths recursively for error msg) @return True if", "directory ----------------------------------------------------------- directoryToUnzipTo = os.path.join(zipDirectory, \"temp\") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo)", "the IBM defined structure @param actual the JSON representation of", "expected element @param currentPath the current path we are on", "else: if latest_version > str_version: continue elif latest_version < str_version:", "x ) unzipRecursively(subfile, subdirectory) return def zipFileIsGood(filePath): \"\"\" Function to", "True if structure of the uploaded bundle follows IBM defined", "expected element. Helper method to verifyBundleStructure. @param actual list of", "uploaded bundle does not meet predefined structure. 
def verifyZipFile(zipDirectory, nameOfBundle):
    """
    Function to verify if an uploaded bundle is:
    1) a valid zip file
    2) follows IBM defined structure
    @param zipDirectory where the bundle ZIP is located
    @param nameOfBundle name of the bundle ZIP file
    """
    print('Validating bundle structure...')
    bundleIsGood = True
    bundleZip = os.path.join(zipDirectory, nameOfBundle)
    if zipFileIsGood(bundleZip):
        try:
            # copy bundle into new working directory -------------------------------------------
            directoryToUnzipTo = os.path.join(zipDirectory, "temp")
            if not os.path.exists(directoryToUnzipTo):
                os.makedirs(directoryToUnzipTo)
            shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle))
            # unzip the bundle -----------------------------------------------------------------
            unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle),
                             os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0]))
            # verify structure of bundle: check the package structure against the definition ---
            expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, "bundle-definition.json"))
            actualBundleStructure = directoryToJSON(directoryToUnzipTo)  # convert the unzipped directory to JSON
            bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, "")
            if not bundleIsGood:
                addToErrorMessages("The uploaded bundle does not meet predefined structure. Could not proceed with deployment.")
            # clean up unzipped stuff and the package structure JSON ----------------------------
            shutil.rmtree(directoryToUnzipTo)
        except Exception:  # narrowed from a bare except
            addToErrorMessages("Exception occurred while verifying bundle structure. Could not proceed with deployment.")
            bundleIsGood = False
    else:
        bundleIsGood = False
        addToErrorMessages("The uploaded bundle could not be unzipped. Could not proceed with deployment.")
    # output the report value: join all the messages together
    print("report=[" + ". ".join(str(x) for x in errorMessages) + "]")
    return bundleIsGood
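# Editor's sketch: a hypothetical end-to-end run. verifyZipFile() expects a bundle-definition.json
# sitting next to the uploaded ZIP; the JSON below mirrors the element shape shown earlier and is
# an illustrative assumption, not a documented schema ('.*' works as the root name because
# matches() uses re.fullmatch).
#
#   {
#     "name": ".*",
#     "type": "directory",
#     "fail-if-not-found": "yes",
#     "error-message-if-fails": "Bundle root is missing",
#     "children": [
#       {"name": "deploy\\.ya?ml", "type": "file", "fail-if-not-found": "yes",
#        "error-message-if-fails": "deploy.yaml is required"}
#     ]
#   }
#
def _exampleValidateUpload():
    # hypothetical paths; the final 'report=[...]' line is printed for the caller to scrape
    return verifyZipFile("/tmp/bundles", "my-app.zip")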
auth=(usr, pwd), headers=headers, verify=False) def get_latest_tag(registry_path, usr,", "addToErrorMessages(errorMessage): \"\"\" Function to add error messages to the global", "children # the children in a directory is a list", "dockerfile_from_cmd = \"FROM \" + docker_url + image_namespace + \"/\"", "error msg and return false if needed isMatched = False", "used for recursive calls) @return True if the list of", "@return the latest image tag \"\"\" tag_list_url = registry_path +", "logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function to adds error messages to the", "add error messages to the global list of errorMessages @param", "currently being checked (used to build paths recursively for error", "\"\"\" print(errorMessage) global errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\"", "# check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure =", "false if expectedElement['fail-if-not-found'] == \"yes\": # log error msg and", "\" seconds\") # ======================= verify bundle Structure =============================================== def openJSONfile(jsonFile):", "== \"yes\": logBundleStructureErrorMessage(expected, currentPath) return False if type(expected) is list:", "element to find in the bundle @param currentPath the path", "== actualElement['type']: ret = True return ret def verifyActualContainsExpectedElement(actual, expectedElement,", "log the error msg and return false if expectedElement['fail-if-not-found'] ==", "= True if type(expected) is dict: if matches(expected,actual): # a", "error msg) @param isMatched (only used for recursive calls) @return", "True bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: # copy", "if matches(expectedElement,actual[i]): matchedPosition = i break if matchedPosition != -1:", "or directory \"\"\" addToErrorMessages(\"A \"+ expected['type'] +\" is missing from", "the one with the highest version number (vX) and the", "= str_version latest_timestamp = dt_timestamp latest_tag = tag else: if", "latest_tag = tag else: if latest_timestamp < dt_timestamp: latest_timestamp =", "bundle follows IBM defined structure. False otherwise. 
\"\"\" isMatched =", "isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched) return isMatched def logBundleStructureErrorMessage(expected,", "Format the FROM command dockerfile_from_cmd = \"FROM \" + docker_url", "of more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)]", "the actual files and directories in the bundle @param expectedElement", "= False if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type']", "json import argparse import zipfile import shutil import requests import", "file instance dockerfile.close() def set_docker_client_timeout(): \"\"\" Sets the DOCKER_CLIENT_TIMEOUT environment", "structure @param actual the JSON representation of the actual structure", "script is being run on :param str dockerfile_parent_dir: path to", "unzip a ZIP file recursively @param zipFileName the zip file", "@param url the url of the REST call @param usr", "the error msg and return false if expectedElement['fail-if-not-found'] == \"yes\":", "HTTP/HTTPS GET requests using external Python module requests @param url", "object is dict if os.path.isdir(directory): d['type'] = \"directory\" # directory", "errorMessages = [] \"\"\" Collection of Common Functions used by", "currentPath = currentPath + actual['name'] + \"/\" if expected['children'] ==", "\"\"\" Function to add error messages to the global list", "checked (used to build paths recursively for error msg) @param", "zip file 2) follows IBM defined structure @param zipDirectory where", "dict then verify it and its children if type(actual) is", "to check if files/directories match. They must have the same", "together print (\"report=[\" + \". \".join(str(x) for x in errorMessages)", "on that has the missing file or directory \"\"\" addToErrorMessages(\"A", "not bundleIsGood: addToErrorMessages(\"The uploaded bundle does not meet predefined structure.", "keep track of all error msgs errorMessages = [] \"\"\"", "= False logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched def addToErrorMessages(errorMessage): \"\"\" Function", "os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo,", "from the path: \\\"\" + currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return", "up unzipped stuff and package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except:", "= openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped", "docker registry VIP accessible from the mesos slaves :param str", "nameOfBundle): \"\"\" Function to verify if an uploaded bundle is:", "unzipped stuff and package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception", "nameOfBundle name of the bundle ZIP file \"\"\" print ('Validating", "expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the", "python3.6 import os import subprocess import json import argparse import", "to test if a ZIP file is good or bad", "# the parent object is dict if 
os.path.isdir(directory): d['type'] =", "d['type'] = \"file\" return d def verifyBundleStructure(expected, actual, currentPath): \"\"\"", "for docker client: \" + os.environ['DOCKER_CLIENT_TIMEOUT'] + \" seconds\") #", "& verifyBundleStructure(expectedElement,actual, currentPath) # if actual is a list then", "== 'directory': currentPath = currentPath + actual['name'] + \"/\" if", "shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle),", "== \"yes\": # log error msg and return false if", "or directory was found if expected['type'] == 'directory': currentPath =", "False return ret def verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function to verify", "is dict: isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath) # if", "@param currentPath the path currently being checked (used to build", "extracted @param directoryToUnzipTo the destination directory to extract the zip", "with deployment.\") bundleIsGood = False else: bundleIsGood = False addToErrorMessages(\"The", "@param jsonFile path to the JSON file @return the loaded", "build paths recursively for error msg) @return True if structure", "in a directory is a list composed of more files/directories", "@return JSON representation of a directory \"\"\" d = {'name':", "IBM defined structure @param zipDirectory where the bundle ZIP is", "with zipfile.ZipFile(zip_file, \"r\") as zip_ref: zip_ref.extractall(to_dir) zip_ref.close() def create_dockerfile(dockerfile_parent_dir, docker_url,", "bundleIsGood = False else: bundleIsGood = False addToErrorMessages(\"The uploaded bundle", "to verifyBundleStructure. @param actual list of the actual files and", "def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function to unzip a ZIP file", "structure of the uploaded bundle @param currentPath the path currently", "children in a directory is a list composed of more", "actualElement): \"\"\" Function to check if files/directories match. They must", "dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag return latest_tag def", "os import subprocess import json import argparse import zipfile import", "in tag: continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try:", "zipFileName the zip file to be extracted @param directoryToUnzipTo the", "@return True if the ZIP file is good. False otherwise.", "a ZIP file recursively @param zipFileName the zip file to", "to the global array. @param expected the expected element @param", "bundle does not meet predefined structure. Could not proceed with", "error msg) @return True if structure of the uploaded bundle", "= {'name': os.path.basename(directory)} # the parent object is dict if", "print ('Validating bundle structure...') bundleIsGood = True bundleZip = os.path.join(zipDirectory,", "objects contains an expected element. Helper method to verifyBundleStructure. @param", "try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue try: latest_version", "a dict, it may contain children if type=directory. 
A directory", "try: latest_version latest_timestamp latest_tag except NameError: latest_version = str_version latest_timestamp", "Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open file instance dockerfile.close() def", "actual element \"\"\" ret = False if re.fullmatch(expectedElement['name'], actualElement['name']) is", "out and we specify it in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr,", "to extract the zip file to \"\"\" # update if", "bundleIsGood = False addToErrorMessages(\"The uploaded bundle could not be unzipped.", "\"\"\" Collection of Common Functions used by Build Scripts A", "continue try: latest_version latest_timestamp latest_tag except NameError: latest_version = str_version", "command string to the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open", "import shutil import requests import datetime import re import operator", "= '300' print(\"The timeout set for docker client: \" +", "the same type. @param expectedElement the expected element. May be", "@param nameOfBundle name of the bundle ZIP file \"\"\" print", "str docker_url: the docker registry VIP accessible from the mesos", "directory may have children # the children in a directory", "not be unzipped. Could not proceed with deployment.\") # out", "structure of the uploaded bundle follows IBM defined structure. False", "on :param str dockerfile_parent_dir: path to the parent directory for", "dict: isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath) # if actual", "be extracted @param to_dir the destination directory to extract the", "image_tag_latest: the latest version tag of the base image :returns:", "for the Dockerfile based on the parent of the caller", "parent object is dict if os.path.isdir(directory): d['type'] = \"directory\" #", "#!/usr/bin/env python3.6 import os import subprocess import json import argparse", "a directory \"\"\" d = {'name': os.path.basename(directory)} # the parent", "bundle structure...') bundleIsGood = True bundleZip = os.path.join(zipDirectory, nameOfBundle) if", "actual[matchedPosition] , currentPath) else : # if they don't match", "the expected element to find in the bundle @param currentPath", "add \"\"\" print(errorMessage) global errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo):", "VIP accessible from the mesos slaves :param str image_namespace: the", "checking else: # a matching file or directory was not", "actual['name'] + \"/\" if expected['children'] == \"_any\": isMatched = isMatched", "expected the JSON representation of the IBM defined structure @param", "for error msg) @return True if structure of the uploaded", "join all the messages together print (\"report=[\" + \". 
\".join(str(x)", "to be extracted @param directoryToUnzipTo the destination directory to extract", "run on :param str dockerfile_parent_dir: path to the parent directory", "the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure", "parent of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") #", "= { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } # TEMP: Remove the suppressed", "its children if type(actual) is dict: isMatched = isMatched &", "isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further checking else:", "filePath the zip file to be tested @return True if", "IBM defined structure @param expected the JSON representation of the", "@param zipFileName the zip file to be extracted @param directoryToUnzipTo", "import datetime import re import operator import unicodedata # global", "None \"\"\" # Form the path for the Dockerfile based", "i break if matchedPosition != -1: # if they match", "more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)] else:", "calls) @return True if the list of actual objects contain", "latest image tag \"\"\" tag_list_url = registry_path + '/tags/list' request", "or bad @param filePath the zip file to be tested", "directoryToUnzipTo the destination directory to extract the zip file to", "docker_url, image_namespace, image_name, image_tag_latest): \"\"\" Creates a dockerfile using the", "isMatched = isMatched & True # if the contents of", "('Validating bundle structure...') bundleIsGood = True bundleZip = os.path.join(zipDirectory, nameOfBundle)", "(used to build paths recursively for error msg) @return True", "Write the FROM command string to the Dockerfile dockerfile.write(dockerfile_from_cmd) #", "if an uploaded bundle follows IBM defined structure @param expected", "tags: vX-YYYYMMDD-HHmm. The latest, by definition, is defined to be", "shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred while verifying bundle structure. Could not", "type(actual) is list: matchedPosition = -1 for i in range(0,", "+ \"/\" + image_name + \":\" + image_tag_latest # Write", "the image :param str image_tag_latest: the latest version tag of", "list and may contain files and/or directories. @param directory directory", "element \"\"\" # if actual is a dict then verify", "image based on its tags: vX-YYYYMMDD-HHmm. The latest, by definition,", "msgs errorMessages = [] \"\"\" Collection of Common Functions used", "@return the loaded JSON file \"\"\" try: with open(jsonFile) as", "same name and must both be the same type. 
@param", "else : # if they don't match then log the", "print(errorMessage) global errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function", "directory was not found if expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected, currentPath)", "defined structure @param zipDirectory where the bundle ZIP is located", "expected['type'] == 'directory': currentPath = currentPath + actual['name'] + \"/\"", "path: \\\"\" + currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement,", "def zipFileIsGood(filePath): \"\"\" Function to test if a ZIP file", "of the image :param str image_name: the name of the", "IBM defined structure @param actual the JSON representation of the", "and the latest timestamp (YYYYMMDD-HHmm). @param registry_path docker registry path", "be extracted @param directoryToUnzipTo the destination directory to extract the", "\" + os.environ['DOCKER_CLIENT_TIMEOUT'] + \" seconds\") # ======================= verify bundle", "True if the list of actual objects contain the expected", "have children # the children in a directory is a", "expected[k], currentPath, isMatched) return isMatched def logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function", "expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected, currentPath) return False if type(expected) is", "\"w+\") # Format the FROM command dockerfile_from_cmd = \"FROM \"", "be defined by regular expression @param actualElement the actual element", "expectedElement, currentPath, isMatched): \"\"\" Function to verify if an actual", "and return false if expectedElement['fail-if-not-found'] == \"yes\": # log error", "was not found if expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected, currentPath) return", "needed isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched def addToErrorMessages(errorMessage):", "= datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue try: latest_version latest_timestamp latest_tag", ", join all the messages together print (\"report=[\" + \".", "if type(actual) is dict: isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath)", "the image :param str image_name: the name of the image", "in os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0]) subfile = os.path.join(directoryToUnzipTo, x", "children if type(actual) is dict: isMatched = isMatched & verifyBundleStructure(expectedElement,actual,", "using external Python module requests @param url the url of", "files @param zip_file the zip file to be extracted @param", "image tag \"\"\" tag_list_url = registry_path + '/tags/list' request =", "(YYYYMMDD-HHmm). @param registry_path docker registry path @param usr the functional", "Function to add error messages to the global list of", "could not be unzipped. Could not proceed with deployment.\") #", "checking else: isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) #", "if a ZIP file is good or bad @param filePath", "expected element. 
May be defined by regular expression @param actualElement", "image :param str image_tag_latest: the latest version tag of the", "verifyBundleStructure(expected, actual, currentPath): \"\"\" Function to verify if an uploaded", "verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath) else : # if they don't", "isMatched): \"\"\" Function to verify if an actual list of", "functional user @return a JSON response \"\"\" headers = {", "username for the docker registry @param pwd the password for", "by Build Scripts A collection of common functions shared by", "# if actual is a dict then verify it and", "not proceed with deployment.\") bundleIsGood = False else: bundleIsGood =", "\"directory\" # directory may have children # the children in", "addToErrorMessages(\"The uploaded bundle does not meet predefined structure. Could not", "if the ZIP file is good. False otherwise. \"\"\" ret", "match. They must have the same name and must both", "to unzip a ZIP file recursively @param zipFileName the zip", "of a list and may contain files and/or directories. @param", "tag \"\"\" tag_list_url = registry_path + '/tags/list' request = get(tag_list_url,", "functional username for the docker registry @param pwd the password", "Dockerfile :param str docker_url: the docker registry VIP accessible from", "the matched position elif type(actual) is list: matchedPosition = -1", "the latest image tag \"\"\" tag_list_url = registry_path + '/tags/list'", "missing file or directory \"\"\" addToErrorMessages(\"A \"+ expected['type'] +\" is", "latest_timestamp latest_tag except NameError: latest_version = str_version latest_timestamp = dt_timestamp", "string to the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open file", "if expected['children'] == \"_any\": isMatched = isMatched & True #", "error messages to the global list of errorMessages @param errorMessage", "file bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not bundleIsGood: addToErrorMessages(\"The", "the zip file to be extracted @param directoryToUnzipTo the destination", "directory into JSON form. The parent object is always a", "the Dockerfile dockerfile.write(dockerfile_from_cmd) # Close the open file instance dockerfile.close()", "to_dir the destination directory to extract the zip file to", "addToErrorMessages(\"The uploaded bundle could not be unzipped. 
Could not proceed", "logBundleStructureErrorMessage(expected, currentPath) return False if type(expected) is list: for k", "the zip file to be tested @return True if the", "directory \"\"\" d = {'name': os.path.basename(directory)} # the parent object", "actual is a dict then verify it and its children", "def directoryToJSON(directory): \"\"\" Function to convert objects in a given", "actualElement the actual element \"\"\" ret = False if re.fullmatch(expectedElement['name'],", "for x in os.listdir(directory)] else: d['type'] = \"file\" return d", "os.listdir(directory)] else: d['type'] = \"file\" return d def verifyBundleStructure(expected, actual,", "the global list of errorMessages @param errorMessage the error message", "path currently being checked (used to build paths recursively for", "with deployment.\") # clean up unzipped stuff and package structure", "dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError: continue try: latest_version latest_timestamp", "return isMatched def logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function to adds error", "------------------------------------------------------------------------ # check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure", "find out if they match anywhere, if so get the", "a JSON response \"\"\" headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', }", "str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except", "\"\"\" # Form the path for the Dockerfile based on", "\"\"\" Function to adds error messages to the global array.", "if type(expected) is list: for k in range(0,len(expected)): isMatched =", ".zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo,", "headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } # TEMP: Remove the", "parent directory for the Dockerfile :param str docker_url: the docker", "image_namespace: the name of the image :param str image_name: the", "os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip the bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo,", "= verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not bundleIsGood: addToErrorMessages(\"The uploaded bundle", "----------------------------------------------------------- directoryToUnzipTo = os.path.join(zipDirectory, \"temp\") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip,", "of the base image :returns: None \"\"\" # Form the", "the latest version tag of the base image :returns: None", "\"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped directory to", "# Create the Dockerfile dockerfile = open(dockerfile_path, \"w+\") # Format", "\":\" + image_tag_latest # Write the FROM command string to", "directory is composed of a list and may contain files", "currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement, actualElement): \"\"\" Function", "it and its children if type(actual) is dict: isMatched =", "functions shared by each individual build scripts. 
\"\"\" def get(url,", "\"\"\" print ('Validating bundle structure...') bundleIsGood = True bundleZip =", "directories in the bundle @param expectedElement the expected element to", "if needed isMatched = False logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched def", "to 300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print(\"The timeout set for", "loaded JSON file \"\"\" try: with open(jsonFile) as json_data_file: data", "import os import subprocess import json import argparse import zipfile", "good. False otherwise. \"\"\" ret = True try: the_zip_file =", "verify it and its children if type(actual) is dict: isMatched", "= os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the Dockerfile dockerfile = open(dockerfile_path,", "if zipFileName.endswith(\".zip\"): #check if it's a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for", "for recursive calls) @return True if the list of actual", "= os.path.join(zipDirectory, \"temp\") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle))", "# a matching file or directory was not found if", "both be the same type. @param expectedElement the expected element.", "matches(expectedElement, actualElement): \"\"\" Function to check if files/directories match. They", "be the same type. @param expectedElement the expected element. May", "\"\"\" Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT']", "structure @param zipDirectory where the bundle ZIP is located @param", "where the bundle ZIP is located @param nameOfBundle name of", "= tag else: if latest_version > str_version: continue elif latest_version", "accessible from the mesos slaves :param str image_namespace: the name", "ZIP file recursively @param zipFileName the zip file to be", "to adds error messages to the global array. @param expected", "for the Dockerfile :param str docker_url: the docker registry VIP", "while verifying bundle structure. Could not proceed with deployment.\") bundleIsGood", "expectedElement['fail-if-not-found'] == \"yes\": # log error msg and return false", "bundle follows IBM defined structure @param expected the JSON representation", "dict: if matches(expected,actual): # a matching file or directory was", "extracting zip files @param zip_file the zip file to be", "str image_tag_latest: the latest version tag of the base image", "# if they don't match then log the error msg", "method to verifyBundleStructure. 
@param actual list of the actual files", "msg and return false if expectedElement['fail-if-not-found'] == \"yes\": # log", "verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched) return isMatched def logBundleStructureErrorMessage(expected, currentPath): \"\"\"", "currentPath) # if actual is a list then find out", "latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag return", "actual files and directories in the bundle @param expectedElement the", "definition, is defined to be the one with the highest", "contents of the directory can be anything then do no", "if it's a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo):", "bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not bundleIsGood: addToErrorMessages(\"The uploaded", "os.path.join(directoryToUnzipTo, x ) unzipRecursively(subfile, subdirectory) return def zipFileIsGood(filePath): \"\"\" Function", "= tag return latest_tag def unzip(zip_file, to_dir): \"\"\" Generic unzip", "the bundle ZIP is located @param nameOfBundle name of the", "is a dict then verify it and its children if", "# if the contents of the directory can be anything", "then log the error msg and return false if expectedElement['fail-if-not-found']", "an expected element. Helper method to verifyBundleStructure. @param actual list", "zip file to be tested @return True if the ZIP", "& verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath) else : # if they", "return isMatched def addToErrorMessages(errorMessage): \"\"\" Function to add error messages", "is: 1) a valid zip file 2) follows IBM defined", "ret = True return ret def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched):", "# ======================= verify bundle Structure =============================================== def openJSONfile(jsonFile): \"\"\" Function", "uploaded bundle follows IBM defined structure @param expected the JSON", "@param to_dir the destination directory to extract the zip file", "file to \"\"\" with zipfile.ZipFile(zip_file, \"r\") as zip_ref: zip_ref.extractall(to_dir) zip_ref.close()", "file @param jsonFile path to the JSON file @return the", "a matching file or directory was found if expected['type'] ==", "common functions shared by each individual build scripts. \"\"\" def", "timestamp (YYYYMMDD-HHmm). @param registry_path docker registry path @param usr the", "d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)] else: d['type'] =", "shutil import requests import datetime import re import operator import", "the bundle @param expectedElement the expected element to find in", "is figured out and we specify it in REQUESTS_CA_BUNDLE return", "have the same name and must both be the same", "\"\"\" isMatched = True if type(expected) is dict: if matches(expected,actual):", "occurred while verifying bundle structure. 
Could not proceed with deployment.\")", "< dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag return latest_tag", "zip files @param zip_file the zip file to be extracted", "if expectedElement['fail-if-not-found'] == \"yes\": # log error msg and return", "True # if the contents of the directory can be", "client: \" + os.environ['DOCKER_CLIENT_TIMEOUT'] + \" seconds\") # ======================= verify", "we are on that has the missing file or directory", "for i in range(0, len(actual)): if matches(expectedElement,actual[i]): matchedPosition = i", "bundle @param expectedElement the expected element to find in the", "are on that has the missing file or directory \"\"\"", "= tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format) except ValueError:", "if they match anywhere, if so get the matched position", "tag return latest_tag def unzip(zip_file, to_dir): \"\"\" Generic unzip function", "dockerfile.close() def set_docker_client_timeout(): \"\"\" Sets the DOCKER_CLIENT_TIMEOUT environment variable to", "the parent directory for the Dockerfile :param str docker_url: the", "@param filePath the zip file to be tested @return True", "deployment.\") # out put report value , join all the", "Function to test if a ZIP file is good or", "matchedPosition = i break if matchedPosition != -1: # if", "to the JSON file @return the loaded JSON file \"\"\"", "'-' not in tag: continue str_version, str_dash, str_timestamp = tag.partition('-')", "(only used for recursive calls) @return True if the list", "tag_list['tags']: if '-' not in tag: continue str_version, str_dash, str_timestamp", "to verify if an uploaded bundle follows IBM defined structure", "recursively for error msg) @return True if structure of the", "name and must both be the same type. @param expectedElement", "& verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further checking else: #", "the current path we are on that has the missing", "otherwise. \"\"\" isMatched = True if type(expected) is dict: if", "bundle is: 1) a valid zip file 2) follows IBM", "@param errorMessage the error message to add \"\"\" print(errorMessage) global", "bundleIsGood: addToErrorMessages(\"The uploaded bundle does not meet predefined structure. Could", "in range(0,len(expected)): isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched)", "\"/\" if expected['children'] == \"_any\": isMatched = isMatched & True", "the Dockerfile :param str docker_url: the docker registry VIP accessible", "# the children in a directory is a list composed", "path we are on that has the missing file or", "'/tags/list' request = get(tag_list_url, usr, pwd) tag_list = json.loads(request.text) for", "= True return ret def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched): \"\"\"", "out if they match anywhere, if so get the matched", "is being run on :param str dockerfile_parent_dir: path to the", "headers=headers, verify=False) def get_latest_tag(registry_path, usr, pwd): \"\"\" Retrieve the latest", "of the IBM defined structure @param actual the JSON representation", "else: ret = True except: ret = False return ret", "each individual build scripts. \"\"\" def get(url, usr, pwd): \"\"\"", "@return a JSON response \"\"\" headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json',", "except: addToErrorMessages(\"Exception occurred while verifying bundle structure. 
Could not proceed", "open(jsonFile) as json_data_file: data = json.load(json_data_file) except: addToErrorMessages(\"The specified JSON", "proceed with deployment.\") # clean up unzipped stuff and package", "the JSON file @return the loaded JSON file \"\"\" try:", "be anything then do no further checking else: isMatched =", "= [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)] else: d['type'] = \"file\"", "tag: continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp", "logBundleStructureErrorMessage(expectedElement, currentPath) return isMatched def addToErrorMessages(errorMessage): \"\"\" Function to add", "Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred while verifying bundle structure.", "zipDirectory where the bundle ZIP is located @param nameOfBundle name", "is missing from the path: \\\"\" + currentPath + \"\\\"\")", "name of the image :param str image_name: the name of", "track of all error msgs errorMessages = [] \"\"\" Collection", "@param actualElement the actual element \"\"\" ret = False if", "message to add \"\"\" print(errorMessage) global errorMessges errorMessages.extend([errorMessage]) return def", "usr the functional username for the docker registry @param pwd", "\" + jsonFile) raise return data def directoryToJSON(directory): \"\"\" Function", "structure @param expected the JSON representation of the IBM defined", "if not bundleIsGood: addToErrorMessages(\"The uploaded bundle does not meet predefined", "list of objects contains an expected element. Helper method to", "Could not proceed with deployment.\") # clean up unzipped stuff", "list: matchedPosition = -1 for i in range(0, len(actual)): if", "continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp =", "image_name + \":\" + image_tag_latest # Write the FROM command", "verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not bundleIsGood: addToErrorMessages(\"The uploaded bundle does", "actual list of the actual files and directories in the", "import json import argparse import zipfile import shutil import requests", "@param expected the JSON representation of the IBM defined structure", "JSON file is not valid: \" + jsonFile) raise return", "= dt_timestamp latest_tag = tag return latest_tag def unzip(zip_file, to_dir):", "timeout set for docker client: \" + os.environ['DOCKER_CLIENT_TIMEOUT'] + \"", "bundle structure. Could not proceed with deployment.\") bundleIsGood = False", ":param str image_namespace: the name of the image :param str", "import unicodedata # global list of error messages to keep", "verifyBundleStructure(expectedElement,actual, currentPath) # if actual is a list then find", "suppressed verification once the docker cert location # is figured", "not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip the bundle", "convert objects in a given directory into JSON form. The", "Collection of Common Functions used by Build Scripts A collection", "based on its tags: vX-YYYYMMDD-HHmm. 
The latest, by definition, is", "and package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred while", "actual, currentPath): \"\"\" Function to verify if an uploaded bundle", "# out put report value , join all the messages", "of bundle ------------------------------------------------------------------------ # check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory,", "in a given directory into JSON form. The parent object", "latest version tag of the base image :returns: None \"\"\"", "structure. Could not proceed with deployment.\") bundleIsGood = False else:", "docker registry functional user @return a JSON response \"\"\" headers", "unzipped. Could not proceed with deployment.\") # out put report", "messages to the global list of errorMessages @param errorMessage the", "requests using external Python module requests @param url the url", "to open a JSON file @param jsonFile path to the", "os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0]) subfile", "\"\"\" Function to open a JSON file @param jsonFile path", "Scripts A collection of common functions shared by each individual", "zip_ref: zip_ref.extractall(to_dir) zip_ref.close() def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest): \"\"\"", "image :returns: None \"\"\" # Form the path for the", "argparse import zipfile import shutil import requests import datetime import", "def matches(expectedElement, actualElement): \"\"\" Function to check if files/directories match.", "proceed with deployment.\") bundleIsGood = False else: bundleIsGood = False", "list of actual objects contain the expected element \"\"\" #", "the destination directory to extract the zip file to \"\"\"", "+ \":\" + image_tag_latest # Write the FROM command string", "---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure of bundle", "out put report value , join all the messages together", "the REST call @param usr the functional username for the", "do no further checking else: isMatched = isMatched & verifyBundleStructure(expected['children'],", "pwd), headers=headers, verify=False) def get_latest_tag(registry_path, usr, pwd): \"\"\" Retrieve the", "# directory may have children # the children in a", "used by Build Scripts A collection of common functions shared", "the parent object is dict if os.path.isdir(directory): d['type'] = \"directory\"", "files/directories match. They must have the same name and must", "d['type'] = \"directory\" # directory may have children # the", "then do no further checking else: isMatched = isMatched &", "type=directory. 
def get(url, usr, pwd):
    """
    HTTP/HTTPS GET requests using the external Python module requests
    @param url the url of the REST call
    @param usr the functional username for the docker registry
    @param pwd the password for the docker registry functional user
    @return a JSON response
    """
    headers = {
        'Accept': 'application/vnd.docker.distribution.manifest.v1+json',
    }
    # TEMP: Remove the suppressed verification once the docker cert location
    # is figured out and we specify it in REQUESTS_CA_BUNDLE
    return requests.get(url, auth=(usr, pwd), headers=headers, verify=False)
def get_latest_tag(registry_path, usr, pwd):
    """
    Retrieve the latest version of an image based on its tags: vX-YYYYMMDD-HHmm.
    The latest, by definition, is defined to be the one with the highest version
    number (vX) and the latest timestamp (YYYYMMDD-HHmm).
    @param registry_path path to the image in the docker registry
    @param usr the functional username for the docker registry
    @param pwd the password for the docker registry functional user
    @return the latest image tag
    """
    tag_list_url = registry_path + '/tags/list'
    request = get(tag_list_url, usr, pwd)
    tag_list = json.loads(request.text)
    for tag in tag_list['tags']:
        # skip tags that do not follow the vX-YYYYMMDD-HHmm convention
        if '-' not in tag:
            continue
        str_version, str_dash, str_timestamp = tag.partition('-')
        tag_format = "%Y%m%d-%H%M"
        try:
            dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format)
        except ValueError:
            continue
        # the try/except below initializes the running maximum on the first
        # well-formed tag encountered
        try:
            latest_version
            latest_timestamp
            latest_tag
        except NameError:
            latest_version = str_version
            latest_timestamp = dt_timestamp
            latest_tag = tag
        else:
            # NOTE: version prefixes are compared as strings, so e.g. "v9"
            # sorts above "v10"; this preserves the original logic as written
            if latest_version > str_version:
                continue
            elif latest_version < str_version:
                latest_version = str_version
                latest_timestamp = dt_timestamp
                latest_tag = tag
            else:
                if latest_timestamp < dt_timestamp:
                    latest_timestamp = dt_timestamp
                    latest_tag = tag
    return latest_tag
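# Usage sketch for get_latest_tag (illustrative only -- the registry URL and
# credentials below are hypothetical):
#
#   latest = get_latest_tag("https://registry.example.com/v2/myteam/myimage",
#                           "svc_user", "svc_password")
#   # given tags "v3-20200101-1200" and "v3-20200102-0930",
#   # this returns "v3-20200102-0930"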
\"\"\" ret = True try: the_zip_file = zipfile.ZipFile(filePath) badFile", "and must both be the same type. @param expectedElement the", "build paths recursively for error msg) @param isMatched (only used", "to convert @return JSON representation of a directory \"\"\" d", "currentPath) return isMatched def addToErrorMessages(errorMessage): \"\"\" Function to add error", "tag_list = json.loads(request.text) for tag in tag_list['tags']: if '-' not", "actual list of objects contains an expected element. Helper method", "the parent of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\")", "+ jsonFile) raise return data def directoryToJSON(directory): \"\"\" Function to", "working directory ----------------------------------------------------------- directoryToUnzipTo = os.path.join(zipDirectory, \"temp\") if not os.path.exists(directoryToUnzipTo):", "+ \". \".join(str(x) for x in errorMessages) + \"]\") return", "# log error msg and return false if needed isMatched", "does not meet predefined structure. Could not proceed with deployment.\")", "def addToErrorMessages(errorMessage): \"\"\" Function to add error messages to the", "False otherwise. \"\"\" isMatched = True if type(expected) is dict:", "Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] =", "= False else: bundleIsGood = False addToErrorMessages(\"The uploaded bundle could", "the caller script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the", "usr, pwd): \"\"\" HTTP/HTTPS GET requests using external Python module", "dockerfile_parent_dir: path to the parent directory for the Dockerfile :param", "True try: the_zip_file = zipfile.ZipFile(filePath) badFile = the_zip_file.testzip() if badFile", "is not valid: \" + jsonFile) raise return data def", "if matchedPosition != -1: # if they match then verify", "= False else: ret = True except: ret = False", "The parent object is always a dict, it may contain", "that has the missing file or directory \"\"\" addToErrorMessages(\"A \"+", "{'name': os.path.basename(directory)} # the parent object is dict if os.path.isdir(directory):", "isMatched) return isMatched def logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function to adds", "file to be extracted @param directoryToUnzipTo the destination directory to", "for error msg) @param isMatched (only used for recursive calls)", "the DOCKER_CLIENT_TIMEOUT environment variable to 300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] = '300'", "range(0, len(actual)): if matches(expectedElement,actual[i]): matchedPosition = i break if matchedPosition", "msg) @param isMatched (only used for recursive calls) @return True", "put report value , join all the messages together print", "element. May be defined by regular expression @param actualElement the", "dockerfile.write(dockerfile_from_cmd) # Close the open file instance dockerfile.close() def set_docker_client_timeout():", "None and expectedElement['type'] == actualElement['type']: ret = True return ret", "Retrieve the latest version of an image based on its", "'300' print(\"The timeout set for docker client: \" + os.environ['DOCKER_CLIENT_TIMEOUT']", "file to be extracted @param to_dir the destination directory to", "messages to the global array. @param expected the expected element", "dict, it may contain children if type=directory. A directory is", "tested @return True if the ZIP file is good. 
def set_docker_client_timeout():
    """
    Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300
    """
    os.environ['DOCKER_CLIENT_TIMEOUT'] = '300'
    print("The timeout set for docker client: " + os.environ['DOCKER_CLIENT_TIMEOUT'] + " seconds")

# ======================= verify bundle Structure ===============================================

def openJSONfile(jsonFile):
    """
    Function to open a JSON file
    @param jsonFile path to the JSON file
    @return the loaded JSON file
    """
    try:
        with open(jsonFile) as json_data_file:
            data = json.load(json_data_file)
    except:
        addToErrorMessages("The specified JSON file is not valid: " + jsonFile)
        raise
    return data
\"\"\" ret = True try: the_zip_file", "list of errorMessages @param errorMessage the error message to add", "# TEMP: Remove the suppressed verification once the docker cert", "latest_version latest_timestamp latest_tag except NameError: latest_version = str_version latest_timestamp =", "a matching file or directory was not found if expected['fail-if-not-found']", "list then find out if they match anywhere, if so", "to verify if an uploaded bundle is: 1) a valid", "os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: # copy bundle into new", "is list: matchedPosition = -1 for i in range(0, len(actual)):", "currentPath) return False if type(expected) is list: for k in", "figured out and we specify it in REQUESTS_CA_BUNDLE return requests.get(url,", "return requests.get(url, auth=(usr, pwd), headers=headers, verify=False) def get_latest_tag(registry_path, usr, pwd):", "str image_name: the name of the image :param str image_tag_latest:", "for k in range(0,len(expected)): isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k],", "d def verifyBundleStructure(expected, actual, currentPath): \"\"\" Function to verify if", "import re import operator import unicodedata # global list of", "latest version of an image based on its tags: vX-YYYYMMDD-HHmm.", "error msg and return false if expectedElement['fail-if-not-found'] == \"yes\": #", "if files/directories match. They must have the same name and", "os.path.isdir(directory): d['type'] = \"directory\" # directory may have children #", "composed of a list and may contain files and/or directories.", "match then log the error msg and return false if", "zipFileIsGood(filePath): \"\"\" Function to test if a ZIP file is", "REST call @param usr the functional username for the docker", "+ \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement, actualElement): \"\"\" Function to", "else: bundleIsGood = False addToErrorMessages(\"The uploaded bundle could not be", "= -1 for i in range(0, len(actual)): if matches(expectedElement,actual[i]): matchedPosition", "bundle Structure =============================================== def openJSONfile(jsonFile): \"\"\" Function to open a", "stuff and package structure Json ------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred", "then verify it and its children if type(actual) is dict:", "@param zip_file the zip file to be extracted @param to_dir", "is located @param nameOfBundle name of the bundle ZIP file", "it may contain children if type=directory. A directory is composed", "\"\"\" Function to unzip a ZIP file recursively @param zipFileName", "if they don't match then log the error msg and", "instance dockerfile.close() def set_docker_client_timeout(): \"\"\" Sets the DOCKER_CLIENT_TIMEOUT environment variable", "\". \".join(str(x) for x in errorMessages) + \"]\") return bundleIsGood", "Function to check if files/directories match. They must have the", "if structure of the uploaded bundle follows IBM defined structure.", "contain children if type=directory. 
def verifyBundleStructure(expected, actual, currentPath):
    """
    Function to verify if an uploaded bundle follows the IBM defined structure
    @param expected the JSON representation of the IBM defined structure
    @param actual the JSON representation of the actual structure of the uploaded bundle
    @param currentPath the path currently being checked (used to build paths recursively for error msg)
    @return True if the structure of the uploaded bundle follows the IBM defined structure. False otherwise.
    """
    isMatched = True
    if type(expected) is dict:
        if matches(expected, actual):
            # a matching file or directory was found
            if expected['type'] == 'directory':
                currentPath = currentPath + actual['name'] + "/"
                if expected['children'] == "_any":
                    # if the contents of the directory can be anything then do no further checking
                    isMatched = isMatched & True
                else:
                    # do further checking
                    isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath)
        else:
            # a matching file or directory was not found
            if expected['fail-if-not-found'] == "yes":
                logBundleStructureErrorMessage(expected, currentPath)
                return False
    if type(expected) is list:
        for k in range(0, len(expected)):
            isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched)
    return isMatched
\"\"\" ret =", "os.environ['DOCKER_CLIENT_TIMEOUT'] + \" seconds\") # ======================= verify bundle Structure ===============================================", "False if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type'] ==", "# update if zipFileName.endswith(\".zip\"): #check if it's a .zip unzip(zipFileName,directoryToUnzipTo)", "paths recursively for error msg) @param isMatched (only used for", "usr, pwd): \"\"\" Retrieve the latest version of an image", "composed of more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in", "= zipfile.ZipFile(filePath) badFile = the_zip_file.testzip() if badFile is not None:", "image_name, image_tag_latest): \"\"\" Creates a dockerfile using the correct docker", "build scripts. \"\"\" def get(url, usr, pwd): \"\"\" HTTP/HTTPS GET", "the zip file to be extracted @param to_dir the destination", "correct docker registry URL associated with the datacenter this script", "of objects contains an expected element. Helper method to verifyBundleStructure.", "tag of the base image :returns: None \"\"\" # Form", "def set_docker_client_timeout(): \"\"\" Sets the DOCKER_CLIENT_TIMEOUT environment variable to 300", "they match then verify their children too isMatched = isMatched", "else: if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag =", "Function to unzip a ZIP file recursively @param zipFileName the", "a ZIP file is good or bad @param filePath the", "matchedPosition = -1 for i in range(0, len(actual)): if matches(expectedElement,actual[i]):", "mesos slaves :param str image_namespace: the name of the image", "response \"\"\" headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } # TEMP:", "else: # a matching file or directory was not found", "currentPath) else : # if they don't match then log", "JSON response \"\"\" headers = { 'Accept': 'application/vnd.docker.distribution.manifest.v1+json', } #", "nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure of bundle ------------------------------------------------------------------------ #", "the mesos slaves :param str image_namespace: the name of the", "!= -1: # if they match then verify their children", "the docker registry @param pwd the password for the docker", "isMatched def logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function to adds error messages", "def get(url, usr, pwd): \"\"\" HTTP/HTTPS GET requests using external", "x in os.listdir(directory)] else: d['type'] = \"file\" return d def", "file to \"\"\" # update if zipFileName.endswith(\".zip\"): #check if it's", "#check if it's a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in", "latest_timestamp = dt_timestamp latest_tag = tag else: if latest_timestamp <", "do further checking else: # a matching file or directory", "+ image_tag_latest # Write the FROM command string to the", "@param directory directory to convert @return JSON representation of a", "of the uploaded bundle @param currentPath the path currently being", "defined structure @param expected the JSON representation of the IBM", "dt_timestamp latest_tag = tag else: if latest_timestamp < dt_timestamp: latest_timestamp", "for tag in tag_list['tags']: if '-' not in tag: continue", "a list composed of more files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for", "A collection of common functions shared by each individual build", "the 
def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched):
    """
    Function to verify if an actual list of objects contains an expected element.
    Helper method to verifyBundleStructure.
    @param actual list of the actual files and directories in the bundle
    @param expectedElement the expected element to find in the bundle
    @param currentPath the path currently being checked (used to build paths recursively for error msg)
    @param isMatched (only used for recursive calls)
    @return True if the list of actual objects contains the expected element
    """
    # if actual is a dict then verify it and its children
    if type(actual) is dict:
        isMatched = isMatched & verifyBundleStructure(expectedElement, actual, currentPath)
    # if actual is a list then find out if they match anywhere, if so get the matched position
    elif type(actual) is list:
        matchedPosition = -1
        for i in range(0, len(actual)):
            if matches(expectedElement, actual[i]):
                matchedPosition = i
                break
        if matchedPosition != -1:
            # if they match then verify their children too
            isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition], currentPath)
        else:
            # if they don't match then log the error msg and return false if needed
            if expectedElement['fail-if-not-found'] == "yes":
                isMatched = False
                logBundleStructureErrorMessage(expectedElement, currentPath)
    return isMatched
\".join(str(x) for x", "requests @param url the url of the REST call @param", "break if matchedPosition != -1: # if they match then", "base image :returns: None \"\"\" # Form the path for", "global list of error messages to keep track of all", "pwd): \"\"\" Retrieve the latest version of an image based", "must have the same name and must both be the", "\"yes\": logBundleStructureErrorMessage(expected, currentPath) return False if type(expected) is list: for", "the url of the REST call @param usr the functional", "then verify their children too isMatched = isMatched & verifyBundleStructure(expectedElement,", "to keep track of all error msgs errorMessages = []", "def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched): \"\"\" Function to verify if", "actual['children'], currentPath) # do further checking else: # a matching", ": # if they don't match then log the error", "= True bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: #", "a dict then verify it and its children if type(actual)", "zip file to be extracted @param directoryToUnzipTo the destination directory", "script dockerfile_path = os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the Dockerfile dockerfile", "open(dockerfile_path, \"w+\") # Format the FROM command dockerfile_from_cmd = \"FROM", "docker_url + image_namespace + \"/\" + image_name + \":\" +", "Structure =============================================== def openJSONfile(jsonFile): \"\"\" Function to open a JSON", "dict if os.path.isdir(directory): d['type'] = \"directory\" # directory may have", "unzipped directory to JSON file bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\")", "actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped directory to JSON", "os.path.splitext(x)[0]) subfile = os.path.join(directoryToUnzipTo, x ) unzipRecursively(subfile, subdirectory) return def", "operator import unicodedata # global list of error messages to", "msg and return false if needed isMatched = False logBundleStructureErrorMessage(expectedElement,", "the password for the docker registry functional user @return the", "errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function to unzip a", "& True # if the contents of the directory can", "an actual list of objects contains an expected element. Helper", "messages to keep track of all error msgs errorMessages =", "def verifyBundleStructure(expected, actual, currentPath): \"\"\" Function to verify if an", "300 \"\"\" os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print(\"The timeout set for docker", "docker registry URL associated with the datacenter this script is", "+ \" seconds\") # ======================= verify bundle Structure =============================================== def", "into JSON form. The parent object is always a dict,", "@param expectedElement the expected element. May be defined by regular", "of an image based on its tags: vX-YYYYMMDD-HHmm. The latest,", "if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag = tag", "re import operator import unicodedata # global list of error", "extracted @param to_dir the destination directory to extract the zip", "of common functions shared by each individual build scripts. 
\"\"\"", "+\" is missing from the path: \\\"\" + currentPath +", "open file instance dockerfile.close() def set_docker_client_timeout(): \"\"\" Sets the DOCKER_CLIENT_TIMEOUT", "isMatched & True # if the contents of the directory", "@param expectedElement the expected element to find in the bundle", "the global array. @param expected the expected element @param currentPath", ", currentPath) else : # if they don't match then", "the bundle ZIP file \"\"\" print ('Validating bundle structure...') bundleIsGood", "directories. @param directory directory to convert @return JSON representation of", "the correct docker registry URL associated with the datacenter this", "follows IBM defined structure @param expected the JSON representation of", "Function to verify if an uploaded bundle follows IBM defined", "if type=directory. A directory is composed of a list and", "json.load(json_data_file) except: addToErrorMessages(\"The specified JSON file is not valid: \"", "= False return ret def verifyZipFile(zipDirectory, nameOfBundle): \"\"\" Function to", "May be defined by regular expression @param actualElement the actual", "if badFile is not None: ret = False else: ret", "expected element \"\"\" # if actual is a dict then", "latest_tag = tag else: if latest_version > str_version: continue elif", "by regular expression @param actualElement the actual element \"\"\" ret", "\"\"\" Function to verify if an uploaded bundle is: 1)", "the JSON representation of the IBM defined structure @param actual", "os.path.basename(directory)} # the parent object is dict if os.path.isdir(directory): d['type']", "to be the one with the highest version number (vX)", "type. @param expectedElement the expected element. May be defined by", "= dt_timestamp latest_tag = tag else: if latest_version > str_version:", "image_tag_latest): \"\"\" Creates a dockerfile using the correct docker registry", "expectedElement['type'] == actualElement['type']: ret = True return ret def verifyActualContainsExpectedElement(actual,", "if re.fullmatch(expectedElement['name'], actualElement['name']) is not None and expectedElement['type'] == actualElement['type']:", "is not None: ret = False else: ret = True", "str image_namespace: the name of the image :param str image_name:", "if so get the matched position elif type(actual) is list:", "version tag of the base image :returns: None \"\"\" #", "test if a ZIP file is good or bad @param", "file to be tested @return True if the ZIP file", ":param str image_tag_latest: the latest version tag of the base", "= os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: # copy bundle into", "it's a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo): subdirectory", "return data def directoryToJSON(directory): \"\"\" Function to convert objects in", "\"\"\" try: with open(jsonFile) as json_data_file: data = json.load(json_data_file) except:", "object is always a dict, it may contain children if", "individual build scripts. 
\"\"\" def get(url, usr, pwd): \"\"\" HTTP/HTTPS", "the children in a directory is a list composed of", "zipFileName.endswith(\".zip\"): #check if it's a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x", "of the bundle ZIP file \"\"\" print ('Validating bundle structure...')", "type(expected) is dict: if matches(expected,actual): # a matching file or", "\"\"\" # update if zipFileName.endswith(\".zip\"): #check if it's a .zip", "name of the image :param str image_tag_latest: the latest version", "given directory into JSON form. The parent object is always", "\"\"\" Function to verify if an actual list of objects", "if type(expected) is dict: if matches(expected,actual): # a matching file", "if zipFileIsGood(bundleZip): try: # copy bundle into new working directory", "docker_url: the docker registry VIP accessible from the mesos slaves", "docker registry functional user @return the latest image tag \"\"\"", "to build paths recursively for error msg) @param isMatched (only", "errorMessages @param errorMessage the error message to add \"\"\" print(errorMessage)", "= True except: ret = False return ret def verifyZipFile(zipDirectory,", "isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath) else : # if", "recursively for error msg) @param isMatched (only used for recursive", "of the uploaded bundle follows IBM defined structure. False otherwise.", "structure of bundle ------------------------------------------------------------------------ # check package stucture expectedPackageStructure =", "actual objects contain the expected element \"\"\" # if actual", "messages together print (\"report=[\" + \". \".join(str(x) for x in", "latest_tag except NameError: latest_version = str_version latest_timestamp = dt_timestamp latest_tag", "matching file or directory was found if expected['type'] == 'directory':", "'directory': currentPath = currentPath + actual['name'] + \"/\" if expected['children']", "deployment.\") bundleIsGood = False else: bundleIsGood = False addToErrorMessages(\"The uploaded", "the zip file to \"\"\" # update if zipFileName.endswith(\".zip\"): #check", "ret = True except: ret = False return ret def", "uploaded bundle is: 1) a valid zip file 2) follows", "get(url, usr, pwd): \"\"\" HTTP/HTTPS GET requests using external Python", "not found if expected['fail-if-not-found'] == \"yes\": logBundleStructureErrorMessage(expected, currentPath) return False", "latest_version = str_version latest_timestamp = dt_timestamp latest_tag = tag else:", "json_data_file: data = json.load(json_data_file) except: addToErrorMessages(\"The specified JSON file is", "bundle ---------------------------------------------------------------------------------- unzipRecursively(os.path.join(directoryToUnzipTo, nameOfBundle), os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure of", "function for extracting zip files @param zip_file the zip file", "type(actual) is dict: isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath) #", "is not None and expectedElement['type'] == actualElement['type']: ret = True", "to JSON file bundleIsGood = verifyBundleStructure(expectedPackageStructure, actualBundleStructure, \"\") if not", "\"yes\": # log error msg and return false if needed", "os.environ['DOCKER_CLIENT_TIMEOUT'] = '300' print(\"The timeout set for docker client: \"", "directory to extract the zip file to \"\"\" with zipfile.ZipFile(zip_file,", "be tested @return True if the ZIP file is good.", 
"and we specify it in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr, pwd),", "ret def verifyActualContainsExpectedElement(actual, expectedElement, currentPath, isMatched): \"\"\" Function to verify", "directoryToJSON(directory): \"\"\" Function to convert objects in a given directory", "def logBundleStructureErrorMessage(expected, currentPath): \"\"\" Function to adds error messages to", ") unzipRecursively(subfile, subdirectory) return def zipFileIsGood(filePath): \"\"\" Function to test", "Remove the suppressed verification once the docker cert location #", "user @return a JSON response \"\"\" headers = { 'Accept':", "meet predefined structure. Could not proceed with deployment.\") # clean", "str_dash, str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\" try: dt_timestamp = datetime.datetime.strptime(str_timestamp, tag_format)", "print(\"The timeout set for docker client: \" + os.environ['DOCKER_CLIENT_TIMEOUT'] +", "specified JSON file is not valid: \" + jsonFile) raise", "openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\")) actualBundleStructure = directoryToJSON(directoryToUnzipTo) # convert the unzipped directory", "in REQUESTS_CA_BUNDLE return requests.get(url, auth=(usr, pwd), headers=headers, verify=False) def get_latest_tag(registry_path,", "registry URL associated with the datacenter this script is being", "convert @return JSON representation of a directory \"\"\" d =", "return false if expectedElement['fail-if-not-found'] == \"yes\": # log error msg", "raise return data def directoryToJSON(directory): \"\"\" Function to convert objects", "elif type(actual) is list: matchedPosition = -1 for i in", "> str_version: continue elif latest_version < str_version: latest_version = str_version", "= currentPath + actual['name'] + \"/\" if expected['children'] == \"_any\":", "data def directoryToJSON(directory): \"\"\" Function to convert objects in a", "os.path.join(dockerfile_parent_dir, \"Dockerfile\") # Create the Dockerfile dockerfile = open(dockerfile_path, \"w+\")", "= i break if matchedPosition != -1: # if they", "dockerfile using the correct docker registry URL associated with the", "isMatched = True if type(expected) is dict: if matches(expected,actual): #", "to verify if an actual list of objects contains an", "bundle ------------------------------------------------------------------------ # check package stucture expectedPackageStructure = openJSONfile(os.path.join(zipDirectory, \"bundle-definition.json\"))", "of the actual structure of the uploaded bundle @param currentPath", "the list of actual objects contain the expected element \"\"\"", "children too isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition] , currentPath)", "zip_ref.extractall(to_dir) zip_ref.close() def create_dockerfile(dockerfile_parent_dir, docker_url, image_namespace, image_name, image_tag_latest): \"\"\" Creates", "valid: \" + jsonFile) raise return data def directoryToJSON(directory): \"\"\"", "not in tag: continue str_version, str_dash, str_timestamp = tag.partition('-') tag_format=\"%Y%m%d-%H%M\"", "global list of errorMessages @param errorMessage the error message to", "isMatched (only used for recursive calls) @return True if the", "files/directories d['children'] = [directoryToJSON(os.path.join(directory,x)) for x in os.listdir(directory)] else: d['type']", "bundle could not be unzipped. 
Could not proceed with deployment.\")", "\"_any\": isMatched = isMatched & True # if the contents", "to add \"\"\" print(errorMessage) global errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName,", "bundleZip = os.path.join(zipDirectory, nameOfBundle) if zipFileIsGood(bundleZip): try: # copy bundle", "False addToErrorMessages(\"The uploaded bundle could not be unzipped. Could not", "all the messages together print (\"report=[\" + \". \".join(str(x) for", "+ actual['name'] + \"/\" if expected['children'] == \"_any\": isMatched =", "representation of a directory \"\"\" d = {'name': os.path.basename(directory)} #", "element \"\"\" ret = False if re.fullmatch(expectedElement['name'], actualElement['name']) is not", "zipfile import shutil import requests import datetime import re import", "\"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement, actualElement): \"\"\" Function to check", "currentPath + actual['name'] + \"/\" if expected['children'] == \"_any\": isMatched", "isMatched = isMatched & verifyActualContainsExpectedElement(actual, expected[k], currentPath, isMatched) return isMatched", "follows IBM defined structure @param zipDirectory where the bundle ZIP", "currentPath the current path we are on that has the", "isMatched = isMatched & verifyBundleStructure(expectedElement,actual, currentPath) # if actual is", "convert the unzipped directory to JSON file bundleIsGood = verifyBundleStructure(expectedPackageStructure,", "extract the zip file to \"\"\" with zipfile.ZipFile(zip_file, \"r\") as", "the path: \\\"\" + currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def", "\"temp\") if not os.path.exists(directoryToUnzipTo): os.makedirs(directoryToUnzipTo) shutil.copy(bundleZip, os.path.join(directoryToUnzipTo, nameOfBundle)) # unzip", "# if they match then verify their children too isMatched", "to \"\"\" # update if zipFileName.endswith(\".zip\"): #check if it's a", "is list: for k in range(0,len(expected)): isMatched = isMatched &", "defined structure. False otherwise. \"\"\" isMatched = True if type(expected)", "latest_tag def unzip(zip_file, to_dir): \"\"\" Generic unzip function for extracting", "@param directoryToUnzipTo the destination directory to extract the zip file", "matchedPosition != -1: # if they match then verify their", "the FROM command dockerfile_from_cmd = \"FROM \" + docker_url +", "\\\"\" + currentPath + \"\\\"\") addToErrorMessages(expected['error-message-if-fails']) return def matches(expectedElement, actualElement):", "currentPath, isMatched): \"\"\" Function to verify if an actual list", "an uploaded bundle follows IBM defined structure @param expected the", "unzipRecursively(subfile, subdirectory) return def zipFileIsGood(filePath): \"\"\" Function to test if", "\"\"\" Generic unzip function for extracting zip files @param zip_file", "requests import datetime import re import operator import unicodedata #", "verification once the docker cert location # is figured out", "# a matching file or directory was found if expected['type']", "its tags: vX-YYYYMMDD-HHmm. 
The latest, by definition, is defined to", "str dockerfile_parent_dir: path to the parent directory for the Dockerfile", "tag else: if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp latest_tag", "= isMatched & verifyBundleStructure(expectedElement,actual, currentPath) # if actual is a", "for x in os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0]) subfile =", "list: for k in range(0,len(expected)): isMatched = isMatched & verifyActualContainsExpectedElement(actual,", "errorMessges errorMessages.extend([errorMessage]) return def unzipRecursively(zipFileName, directoryToUnzipTo): \"\"\" Function to unzip", "verify if an uploaded bundle is: 1) a valid zip", "os.listdir(directoryToUnzipTo): subdirectory = os.path.join(directoryToUnzipTo, os.path.splitext(x)[0]) subfile = os.path.join(directoryToUnzipTo, x )", "addToErrorMessages(\"A \"+ expected['type'] +\" is missing from the path: \\\"\"", "being run on :param str dockerfile_parent_dir: path to the parent", "Helper method to verifyBundleStructure. @param actual list of the actual", "the path currently being checked (used to build paths recursively", "# is figured out and we specify it in REQUESTS_CA_BUNDLE", "------------------------------------------------- shutil.rmtree(directoryToUnzipTo) except: addToErrorMessages(\"Exception occurred while verifying bundle structure. Could", "latest_tag = tag return latest_tag def unzip(zip_file, to_dir): \"\"\" Generic", "= tag else: if latest_timestamp < dt_timestamp: latest_timestamp = dt_timestamp", "scripts. \"\"\" def get(url, usr, pwd): \"\"\" HTTP/HTTPS GET requests", "file @return the loaded JSON file \"\"\" try: with open(jsonFile)", "= False addToErrorMessages(\"The uploaded bundle could not be unzipped. Could", "on its tags: vX-YYYYMMDD-HHmm. The latest, by definition, is defined", "defined structure @param actual the JSON representation of the actual", "if matches(expected,actual): # a matching file or directory was found", "= isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further checking", "same type. @param expectedElement the expected element. May be defined", "a JSON file @param jsonFile path to the JSON file", "representation of the actual structure of the uploaded bundle @param", "= dt_timestamp latest_tag = tag else: if latest_timestamp < dt_timestamp:", "Form the path for the Dockerfile based on the parent", "verify their children too isMatched = isMatched & verifyBundleStructure(expectedElement, actual[matchedPosition]", "on the parent of the caller script dockerfile_path = os.path.join(dockerfile_parent_dir,", "@param isMatched (only used for recursive calls) @return True if", "os.path.join(directoryToUnzipTo, os.path.splitext(nameOfBundle)[0])) # verify structure of bundle ------------------------------------------------------------------------ # check", "image_name: the name of the image :param str image_tag_latest: the", "= isMatched & True # if the contents of the", "contain files and/or directories. @param directory directory to convert @return", "global array. @param expected the expected element @param currentPath the", "= json.load(json_data_file) except: addToErrorMessages(\"The specified JSON file is not valid:", "structure. 
Could not proceed with deployment.\") # clean up unzipped", "return def matches(expectedElement, actualElement): \"\"\" Function to check if files/directories", "isMatched = isMatched & verifyBundleStructure(expected['children'], actual['children'], currentPath) # do further", "to be extracted @param to_dir the destination directory to extract", "password for the docker registry functional user @return a JSON", "openJSONfile(jsonFile): \"\"\" Function to open a JSON file @param jsonFile", "= registry_path + '/tags/list' request = get(tag_list_url, usr, pwd) tag_list", "a .zip unzip(zipFileName,directoryToUnzipTo) os.remove(zipFileName) for x in os.listdir(directoryToUnzipTo): subdirectory =", "is composed of a list and may contain files and/or", "new working directory ----------------------------------------------------------- directoryToUnzipTo = os.path.join(zipDirectory, \"temp\") if not", "+ os.environ['DOCKER_CLIENT_TIMEOUT'] + \" seconds\") # ======================= verify bundle Structure", "if expected['type'] == 'directory': currentPath = currentPath + actual['name'] +", "matched position elif type(actual) is list: matchedPosition = -1 for", "directory \"\"\" addToErrorMessages(\"A \"+ expected['type'] +\" is missing from the", "< str_version: latest_version = str_version latest_timestamp = dt_timestamp latest_tag =", "the same name and must both be the same type.", "errorMessage the error message to add \"\"\" print(errorMessage) global errorMessges", "\"\"\" def get(url, usr, pwd): \"\"\" HTTP/HTTPS GET requests using", "@return True if the list of actual objects contain the" ]
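# A minimal sketch of a bundle-definition.json the verifier above can consume,
# inferred from the fields it reads ('name' matched as a regex, 'type',
# 'children', the "_any" wildcard, 'fail-if-not-found', and
# 'error-message-if-fails'); the actual IBM definition file is not part of
# this module:
#
#   [
#     {"name": "deploy\\.json", "type": "file",
#      "fail-if-not-found": "yes",
#      "error-message-if-fails": "deploy.json is required"},
#     {"name": "scripts", "type": "directory", "children": "_any",
#      "fail-if-not-found": "no"}
#   ]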
[ "M_BL, cameraMatrix, visualize=True): kps_pr = [] for category_id, preds in", "rm_ep = (int(kp_rm[0] + (kp_lm[0] - kp_rm[0]) / 5.), int(kp_rm[1]", "to rgb and depth channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image) depth_sub", "np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return q_B def pre_process(rgb_img, depth_img): inp_image", "2, (0, 0, 255), 2) rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])),", "camera intrinsic matrix :param depth_image: depth image :param depth_scale: depth", "tag M_CL, corners = get_M_CL_info(gray, img, False) # remove aruco", "corner[0] + corner[1] < min: min = corner[0] + corner[1]", "= np.array(cv_depth, dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth) cv2.imshow(\"RGB\",", "min: min = corner[0] + corner[1] tl_pxl = [int(corner[0]), int(corner[1])]", "blue channel with the depth channel inp_image = pre_process(img_wo_at, depth)", "depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8) depth = (depth_raw *", "%d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL) return", "kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) # motor 7 is clockwise if", "0), 2) # draw left-middle, right-middle and center key-points rgb_img", "def kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save raw RGB and depth input", "cv2.imshow(\"RGB\", cv_rgb) img = cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray =", "kps_pr: f_w, f_h = 640. / 512., 480. / 512.", "''' project 2d pixel on the image to 3d by", "/ 5.), int(kp_rm[1] + (kp_lm[1] - kp_rm[1]) / 5.)) rgb_img", "convert ROS Image type to OpenCV Image type cv_bridge =", "calibration parameters cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to", "0, 0, 0] f_w, f_h = 640./512., 480./512. kp_lm =", "-orientation # compute the open width dist = np.linalg.norm(kp_lm_3d[:2] -", "= (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d = project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix)", "rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to rgb and depth channel", "replacement pixel value rep_color = img_out[tl_pxl[0] - 10, tl_pxl[1] -", "img = cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8) depth", "0] # sort by the confidence score kps_pr = sorted(kps_pr,", "utils.utils import AverageMeter from datasets.dataset_factory import dataset_factory from detectors.detector_factory import", "(256, 256)) return inp_image def kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save raw", "= 640. / 512., 480. / 512. 
def project(pixel, depth_image, M_CL, M_BL, cameraMatrix):
    '''
    Project a 2d pixel on the image to 3d using depth info.
    :param pixel: x, y
    :param M_CL: trans from camera to aruco tag
    :param cameraMatrix: camera intrinsic matrix
    :param depth_image: depth image
    :return: q_B: 3d coordinate of pixel with respect to base frame
    '''
    depth = depth_image[pixel[1], pixel[0]]
    # if the depth of the detected pixel is 0, check the depth of its
    # neighbors in a growing ring (counter-clock wise)
    nei_range = 1
    while depth == 0:
        for delta_x in range(-nei_range, nei_range + 1):
            for delta_y in range(-nei_range, nei_range + 1):
                nei = [pixel[0] + delta_x, pixel[1] + delta_y]
                depth = depth_image[nei[1], nei[0]]
                if depth != 0:
                    break
            if depth != 0:
                break
        nei_range += 1
    pxl = np.linalg.inv(cameraMatrix).dot(np.array([pixel[0] * depth, pixel[1] * depth, depth]))
    q_C = np.array([pxl[0], pxl[1], pxl[2], 1])
    q_L = np.linalg.inv(M_CL).dot(q_C)
    q_B = M_BL.dot(q_L)
    return q_B
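# The back-projection performed by project(), written out (u, v are the pixel
# coordinates, d the depth value, K the intrinsic matrix):
#
#   q_C = K^-1 * [u*d, v*d, d]^T    (camera frame, then homogenized to [x, y, z, 1])
#   q_L = M_CL^-1 * q_C             (camera frame -> aruco tag frame)
#   q_B = M_BL * q_L                (tag frame -> robot base frame)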
<= y <= 11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map,", "[ 0.58952768, -0.07926594, -0.8038495, 0.66103522], [ 0., 0., 0., 1.", "key=lambda x: x[-1], reverse=True) # select the top 1 grasp", "= corner[0] + corner[1] tl_pxl = [int(corner[0]), int(corner[1])] if corner[0]", "draw left-middle, right-middle and center key-points rgb_img = cv2.circle(rgb_img, (int(kp_lm[0]),", "first frame, it may contain nothing if ids_CL is None:", "dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2]) # draw arrow for left-middle", "image into the network ret = detector.run(inp_image[:, :, :]) ret", "Image, CameraInfo from cv_bridge import CvBridge, CvBridgeError import message_filters import", "'__main__': # initialize ros node rospy.init_node(\"Static_grasping\") # Bridge to convert", "the detected pixel is 0, check the depth of its", "of pixel with respect to base frame ''' depth =", "cameraMatrix) center_3d = project(center, depth_map, M_CL, M_BL, cameraMatrix) orientation =", "tag from input image to avoid mis-detection if corners is", "== 0: for delta_x in range(-nei_range, nei_range + 1): for", "= rep_color return img_out def project(pixel, depth_image, M_CL, M_BL, cameraMatrix):", "int(corner[1])] # get the replacement pixel value rep_color = img_out[tl_pxl[0]", "kp_lm = (int(kp_pr[0] * f_w), int(kp_pr[1] * f_h)) kp_rm =", "and isWithinRange(kp_rm, 640, 480): res = kp_pr break if res", "else: orientation = -orientation # compute the open width dist", "(%d, %d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL)", "np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0]) # initialize GKNet Detector opt", "json import cv2 import cv2.aruco as aruco import numpy as", "import CvBridge, CvBridgeError import message_filters import torch from external.nms import", "in range(tl_pxl[0] - 45, br_pxl[0] + 46): img_out[h, w, :]", "w, h): x, y = pxl[:] return w/12. <= x", ":] def aruco_tag_remove(rgb_image, corners): img_out = rgb_image.copy() # find the", "[0, 0, 0, 0] # sort by the confidence score", "# initialize ros node rospy.init_node(\"Static_grasping\") # Bridge to convert ROS", "0, 0), 2) # draw left-middle, right-middle and center key-points", "M_CL, corners_CL[0][0, :, :] def aruco_tag_remove(rgb_image, corners): img_out = rgb_image.copy()", "preds in net_output.items(): if len(preds) == 0: continue for pred", "cv_depth) cv2.imshow(\"RGB\", cv_rgb) img = cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray", "kps_pr = [] for category_id, preds in net_output.items(): if len(preds)", "dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth) cv2.imshow(\"RGB\", cv_rgb) img", "w/12. <= x <= 11*w/12 and h/12. 
<= y <=", "%(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL) return M_CL,", "(kp_lm[0] - kp_rm[0]) / 5.), int(kp_rm[1] + (kp_lm[1] - kp_rm[1])", "it may contain nothing if ids_CL is None: return default_M_CL,", "+ (kp_rm[0] - kp_lm[0]) / 5.), int(kp_lm[1] + (kp_rm[1] -", "contain nothing if ids_CL is None: return default_M_CL, None rvec_CL,", "0.32000], [0., 0., 1., -0.0450], [0., 0., 0., 1.00000]]) #", "rep_color return img_out def project(pixel, depth_image, M_CL, M_BL, cameraMatrix): '''", "return inp_image def kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save raw RGB and", "kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d =", "draw arrow for left-middle and right-middle key-points lm_ep = (int(kp_lm[0]", "(int(kp_rm[0]), int(kp_rm[1])), 2, (0, 0, 255), 2) rgb_img = cv2.circle(rgb_img,", "1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return q_B def", "intrinsic matrix of Realsense D435 cameraMatrix = np.array([[607.47165, 0.0, 325.90064],", "opts import opts from logger import Logger from utils.utils import", "the depth of its neighbors # by counter-clock wise nei_range", "transformation from the camera to aruco tag M_CL, corners =", "0, 0, 1]) if visualize: # print('aruco is located at", "+ (kp_lm[1] - kp_rm[1]) / 5.)) rgb_img = cv2.arrowedLine(rgb_img, kp_lm,", "import Bool from std_msgs.msg import Float64MultiArray from sensor_msgs.msg import Image,", "0.30000], [0., 1., 0., 0.32000], [0., 0., 1., -0.0450], [0.,", "1. ]] ) # camera intrinsic matrix of Realsense D435", "pixel[1] + delta_y] depth = depth_image[nei[1], nei[0]] if depth !=", "aruco tag from input image to avoid mis-detection if corners", "= ret[\"results\"] loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix)", "== '__main__': # initialize ros node rospy.init_node(\"Static_grasping\") # Bridge to", "= detector_factory[opt.task] detector = Detector(opt) # Publisher of perception result", "project(kp_rm, depth_map, M_CL, M_BL, cameraMatrix) center_3d = project(center, depth_map, M_CL,", "numpy as np import sys import rospy from std_msgs.msg import", "h): x, y = pxl[:] return w/12. 
<= x <=", "coordinate of pixel with respect to base frame ''' depth", "import os import json import cv2 import cv2.aruco as aruco", "None rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL, cameraMatrix, distCoeffs) dst_CL,", "depth != 0: break nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot(", "cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8)", "cv_depth_arr = np.array(cv_depth, dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth)", "= np.zeros((4, 4)) M_CL[:3, :3] = dst_CL M_CL[:3, 3] =", ":param M_CL: trans from camera to aruco tag :param cameraMatrix:", "gray = img.astype(np.uint8) depth = (depth_raw * 1000).astype(np.uint8) # get", "2) rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2)", "np.array([0, 0, 0, 1]) if visualize: # print('aruco is located", "== 0: return [0, 0, 0, 0] # sort by", "opts().parse() Dataset = dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector", "w, :] = rep_color return img_out def project(pixel, depth_image, M_CL,", "max = -sys.maxsize tl_pxl = None br_pxl = None for", "+ corner[1] tl_pxl = [int(corner[0]), int(corner[1])] if corner[0] + corner[1]", "visualize: # print('aruco is located at mean position (%d, %d)'", "that trans raw data to mm :return: q_B: 3d coordinate", "isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm, 640, 480): res = kp_pr", "tag default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912, 0.03027403, -0.59305689,", "2: orientation = np.pi - orientation elif orientation < -np.pi", "np.array([pixel[0] * depth, pixel[1] * depth, depth])) q_C = np.array([pxl[0],", "= np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) # motor 7", "mm :return: q_B: 3d coordinate of pixel with respect to", "(int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) / 5.), int(kp_lm[1] + (kp_rm[1]", "from __future__ import absolute_import from __future__ import division from __future__", "= sys.maxsize max = -sys.maxsize tl_pxl = None br_pxl =", "br_pxl = None for corner in corners: if corner[0] +", "0., 0., 1.00000]]) # default transformation from the camera to", "the robot base to aruco tag M_BL = np.array([[1., 0.,", "0.08434352], [ 0.58952768, -0.07926594, -0.8038495, 0.66103522], [ 0., 0., 0.,", "nei_range + 1): nei = [pixel[0] + delta_x, pixel[1] +", "None for kp_pr in kps_pr: f_w, f_h = 640. 
/", "0., 0.32000], [0., 0., 1., -0.0450], [0., 0., 0., 1.00000]])", "-np.pi / 2: orientation = -np.pi - orientation else: orientation", ":, 0] = depth_img inp_image = cv2.resize(inp_image, (256, 256)) return", "compute the open width dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2]) #", "the camera to aruco tag M_CL, corners = get_M_CL_info(gray, img,", "break nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] * depth,", "orientation else: orientation = -orientation # compute the open width", "46): img_out[h, w, :] = rep_color return img_out def project(pixel,", "import numpy as np import sys import rospy from std_msgs.msg", "depth_image[pixel[1], pixel[0]] # if the depth of the detected pixel", "is 0, check the depth of its neighbors # by", "= rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to rgb and depth", "camera to aruco tag M_CL, corners = get_M_CL_info(gray, img, False)", "Dataset) print(opt) Detector = detector_factory[opt.task] detector = Detector(opt) # Publisher", "None for corner in corners: if corner[0] + corner[1] <", "cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr =", "delta_y in range(-nei_range, nei_range + 1): nei = [pixel[0] +", "0.0]) # initialize GKNet Detector opt = opts().parse() Dataset =", "the replacement pixel value rep_color = img_out[tl_pxl[0] - 10, tl_pxl[1]", "position (%d, %d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL,", "pixel is 0, check the depth of its neighbors #", "from cv_bridge import CvBridge, CvBridgeError import message_filters import torch from", "rm_ep, (0, 0, 0), 2) # draw left-middle, right-middle and", "0, 255), 2) rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2, (0,", "rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters) # for the first frame,", "tl_pxl = [int(corner[0]), int(corner[1])] if corner[0] + corner[1] > max:", "[pixel[0] + delta_x, pixel[1] + delta_y] depth = depth_image[nei[1], nei[0]]", "raw depth image :return: None \"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data,", "_init_paths import os import json import cv2 import cv2.aruco as", "aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters) # for the first frame, it may", "3] = tvec_CL M_CL[3, :] = np.array([0, 0, 0, 1])", "\"\"\" Save raw RGB and depth input from Kinect V1", "img_out[tl_pxl[0] - 10, tl_pxl[1] - 10, :] for h in", "= np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912, 0.03027403, -0.59305689, 0.08434352], [", "depth = depth_image[nei[1], nei[0]] if depth != 0: break if", "= cv_depth_arr.copy() gray = img.astype(np.uint8) depth = (depth_raw * 1000).astype(np.uint8)", "in range(tl_pxl[1] - 45, br_pxl[1] + 46): for w in", "0.00134, -0.00102, 0.0]) # initialize GKNet Detector opt = opts().parse()", "1., -0.0450], [0., 0., 0., 1.00000]]) # default transformation from", "= depth_img inp_image = cv2.resize(inp_image, (256, 256)) return inp_image def", "y = pxl[:] return w/12. <= x <= 11*w/12 and", "ret = detector.run(inp_image[:, :, :]) ret = ret[\"results\"] loc_ori =", "inp_image = cv2.resize(inp_image, (256, 256)) return inp_image def kinect_rgbd_callback(rgb_data, depth_data):", "pxl[:] return w/12. <= x <= 11*w/12 and h/12. 
<=", "base frame ''' depth = depth_image[pixel[1], pixel[0]] # if the", "# get the replacement pixel value rep_color = img_out[tl_pxl[0] -", "M_CL = np.zeros((4, 4)) M_CL[:3, :3] = dst_CL M_CL[:3, 3]", "sorted(kps_pr, key=lambda x: x[-1], reverse=True) # select the top 1", "<= 11*w/12 and h/12. <= y <= 11*h/12 def KpsToGrasppose(net_output,", "/ 2: orientation = -np.pi - orientation else: orientation =", "!= 0: break nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0]", "to aruco tag M_BL = np.array([[1., 0., 0., 0.30000], [0.,", "opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector = detector_factory[opt.task] detector = Detector(opt) #", "- 10, :] for h in range(tl_pxl[1] - 45, br_pxl[1]", "corners) # replace blue channel with the depth channel inp_image", "img_out = rgb_image.copy() # find the top-left and right-bottom corners", "image :param depth_data: raw depth image :return: None \"\"\" try:", "right-bottom corners min = sys.maxsize max = -sys.maxsize tl_pxl =", "- 45, br_pxl[0] + 46): img_out[h, w, :] = rep_color", "7 is clockwise if orientation > np.pi / 2: orientation", "br_pxl[0] + 46): img_out[h, w, :] = rep_color return img_out", "check the depth of its neighbors # by counter-clock wise", "default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL, cameraMatrix, distCoeffs)", "240.91934], [0.0, 0.0, 1.0]]) # distortion of Realsense D435 distCoeffs", "the top 1 grasp prediction within the workspace res =", "= False # Get camera calibration parameters cam_param = rospy.wait_for_message('/camera/rgb/camera_info',", "import opts from logger import Logger from utils.utils import AverageMeter", "corners_CL, ids_CL, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters) # for the", "if visualize: # print('aruco is located at mean position (%d,", "rgb_img, depth_map, M_CL, M_BL, cameraMatrix, visualize=True): kps_pr = [] for", "cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2) rgb_img = cv2.arrowedLine(rgb_img,", "delta_x in range(-nei_range, nei_range + 1): for delta_y in range(-nei_range,", "M_BL, cameraMatrix) orientation = np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0])", "len(kps_pr) == 0: return [0, 0, 0, 0] # sort", "kp_rm[1]) / 5.)) rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0,", "None: return [0, 0, 0, 0] f_w, f_h = 640./512.,", "corners: if corner[0] + corner[1] < min: min = corner[0]", "in range(-nei_range, nei_range + 1): for delta_y in range(-nei_range, nei_range", "std_msgs.msg import Float64MultiArray from sensor_msgs.msg import Image, CameraInfo from cv_bridge", "from datasets.dataset_factory import dataset_factory from detectors.detector_factory import detector_factory # transformation", "dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector = detector_factory[opt.task] detector", "0, 255), 2) if visualize: cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE) cv2.imshow('visual', rgb_img) return", "depth = (depth_raw * 1000).astype(np.uint8) # get the current transformation", "nothing if ids_CL is None: return default_M_CL, None rvec_CL, tvec_CL,", "- kp_lm[0]) / 5.), int(kp_lm[1] + (kp_rm[1] - kp_lm[1]) /", "kp_rm = (int(kp_pr[2] * f_w), int(kp_pr[3] * f_h)) if isWithinRange(kp_lm,", "cv2.circle(rgb_img, (int(kp_lm[0]), int(kp_lm[1])), 2, (0, 0, 255), 2) rgb_img =", "corners is not None: img_wo_at = aruco_tag_remove(img, corners) # replace", "__future__ import 
print_function import _init_paths import os import json import", "-0.13825178], [-0.8045912, 0.03027403, -0.59305689, 0.08434352], [ 0.58952768, -0.07926594, -0.8038495, 0.66103522],", "inp_image = pre_process(img_wo_at, depth) # pass the image into the", "depth image :return: None \"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\")", "corner[0] + corner[1] tl_pxl = [int(corner[0]), int(corner[1])] if corner[0] +", "image to 3d by depth info :param pixel: x, y", "base to aruco tag M_BL = np.array([[1., 0., 0., 0.30000],", "opt = opts().parse() Dataset = dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt, Dataset)", "from std_msgs.msg import Bool from std_msgs.msg import Float64MultiArray from sensor_msgs.msg", "ret = ret[\"results\"] loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL,", "camera to aruco tag :param cameraMatrix: camera intrinsic matrix :param", "br_pxl = [int(corner[0]), int(corner[1])] # get the replacement pixel value", "np.array([[607.47165, 0.0, 325.90064], [0.0, 606.30420, 240.91934], [0.0, 0.0, 1.0]]) #", "Dataset = dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector =", "if corners is not None: img_wo_at = aruco_tag_remove(img, corners) #", "aruco_dict_CL, parameters=parameters) # for the first frame, it may contain", "+ 46): img_out[h, w, :] = rep_color return img_out def", "depth image :param depth_scale: depth scale that trans raw data", "None: img_wo_at = aruco_tag_remove(img, corners) # replace blue channel with", "visualize=True): kps_pr = [] for category_id, preds in net_output.items(): if", "-np.pi - orientation else: orientation = -orientation # compute the", "int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d = project(kp_lm, depth_map, M_CL,", "cameraMatrix = np.array([[607.47165, 0.0, 325.90064], [0.0, 606.30420, 240.91934], [0.0, 0.0,", "= depth_image[pixel[1], pixel[0]] # if the depth of the detected", ":param cameraMatrix: camera intrinsic matrix :param depth_image: depth image :param", "may contain nothing if ids_CL is None: return default_M_CL, None", "2: orientation = -np.pi - orientation else: orientation = -orientation", "<= 11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL, cameraMatrix, visualize=True):", "0, 0] # sort by the confidence score kps_pr =", "M_CL[3, :] = np.array([0, 0, 0, 1]) if visualize: #", "print(e) def isWithinRange(pxl, w, h): x, y = pxl[:] return", "Realsense D435 cameraMatrix = np.array([[607.47165, 0.0, 325.90064], [0.0, 606.30420, 240.91934],", "2d pixel on the image to 3d by depth info", "from Kinect V1 :param rgb_data: RGB image :param depth_data: raw", "cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth) cv2.imshow(\"RGB\", cv_rgb) img = cv_rgb_arr.copy()", "aruco tag :param cameraMatrix: camera intrinsic matrix :param depth_image: depth", "cameraMatrix) pub_res.publish(loc_ori) except CvBridgeError as e: print(e) def isWithinRange(pxl, w,", "0, 0, 0] # sort by the confidence score kps_pr", "= cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2, (0, 0, 255), 2) rgb_img", "depth_img inp_image = cv2.resize(inp_image, (256, 256)) return inp_image def kinect_rgbd_callback(rgb_data,", "(0, 0, 255), 2) rgb_img = cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2,", "+ 1): nei = [pixel[0] + delta_x, pixel[1] + delta_y]", "left-middle and right-middle key-points lm_ep = (int(kp_lm[0] + (kp_rm[0] -", "(kp_lm[1] - kp_rm[1]) / 5.)) 
rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep,", "rgb and depth channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image) depth_sub =", "parameters cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to rgb", "def project(pixel, depth_image, M_CL, M_BL, cameraMatrix): ''' project 2d pixel", "= dst_CL M_CL[:3, 3] = tvec_CL M_CL[3, :] = np.array([0,", "to 3d by depth info :param pixel: x, y :param", "datasets.dataset_factory import dataset_factory from detectors.detector_factory import detector_factory # transformation from", "np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2]) # draw arrow for left-middle and right-middle", "camera to aruco tag default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178],", "nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] * depth, pixel[1]", "depth_img): inp_image = rgb_img inp_image[:, :, 0] = depth_img inp_image", "+ corner[1] > max: max = corner[0] + corner[1] br_pxl", "46): for w in range(tl_pxl[0] - 45, br_pxl[0] + 46):", "= project(kp_rm, depth_map, M_CL, M_BL, cameraMatrix) center_3d = project(center, depth_map,", "ROS Image type to OpenCV Image type cv_bridge = CvBridge()", "w in range(tl_pxl[0] - 45, br_pxl[0] + 46): img_out[h, w,", "Save raw RGB and depth input from Kinect V1 :param", "import soft_nms from opts import opts from logger import Logger", "x, y :param M_CL: trans from camera to aruco tag", "grasp prediction within the workspace res = None for kp_pr", "is not None: img_wo_at = aruco_tag_remove(img, corners) # replace blue", "0] = depth_img inp_image = cv2.resize(inp_image, (256, 256)) return inp_image", "= 1 while depth == 0: for delta_x in range(-nei_range,", "import rospy from std_msgs.msg import Bool from std_msgs.msg import Float64MultiArray", "cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8) depth = (depth_raw", "data to mm :return: q_B: 3d coordinate of pixel with", "-0.59305689, 0.08434352], [ 0.58952768, -0.07926594, -0.8038495, 0.66103522], [ 0., 0.,", "= np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth) cv2.imshow(\"RGB\", cv_rgb) img = cv_rgb_arr.copy() depth_raw", "= img_out[tl_pxl[0] - 10, tl_pxl[1] - 10, :] for h", "opt = opts().update_dataset_info_and_set_heads(opt, Dataset) print(opt) Detector = detector_factory[opt.task] detector =", "import detector_factory # transformation from the robot base to aruco", "with the depth channel inp_image = pre_process(img_wo_at, depth) # pass", "if isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm, 640, 480): res =", "<= y <= 11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL,", "return default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL, cameraMatrix,", "if depth != 0: break nei_range += 1 pxl =", "# Subscribe to rgb and depth channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\",", "cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr = np.array(cv_depth, dtype=np.float32)", "int(res[1]*f_h)) kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d", "parameters=parameters) # for the first frame, it may contain nothing", "int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d = project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix) kp_rm_3d =", "the top-left and right-bottom corners min = sys.maxsize max =", "# motor 7 is clockwise 
if orientation > np.pi /", "located at mean position (%d, %d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix,", "> max: max = corner[0] + corner[1] br_pxl = [int(corner[0]),", "img_out[h, w, :] = rep_color return img_out def project(pixel, depth_image,", "corner[1] tl_pxl = [int(corner[0]), int(corner[1])] if corner[0] + corner[1] >", "input from Kinect V1 :param rgb_data: RGB image :param depth_data:", "(0, 0, 255), 2) rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2,", "(int(kp_pr[2] * f_w), int(kp_pr[3] * f_h)) if isWithinRange(kp_lm, 640, 480)", "depth_sub = message_filters.Subscriber(\"/camera/depth_registered/image\", Image) ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], 1, 0.1)", "the camera to aruco tag default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293,", "[0.0, 0.0, 1.0]]) # distortion of Realsense D435 distCoeffs =", "11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL, cameraMatrix, visualize=True): kps_pr", "of its neighbors # by counter-clock wise nei_range = 1", "top 1 grasp prediction within the workspace res = None", "corners): img_out = rgb_image.copy() # find the top-left and right-bottom", "cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None) # Subscribe to rgb and", "cv_depth_arr.copy() gray = img.astype(np.uint8) depth = (depth_raw * 1000).astype(np.uint8) #", "1 grasp prediction within the workspace res = None for", "corner[0] + corner[1] > max: max = corner[0] + corner[1]", "from the camera to aruco tag default_M_CL = np.array([[-0.07134498, -0.99639369,", "return w/12. <= x <= 11*w/12 and h/12. <= y", "0.093 aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL) # aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_6X6_250) parameters =", "+ 1): for delta_y in range(-nei_range, nei_range + 1): nei", "is None: return default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0],", "= [int(corner[0]), int(corner[1])] if corner[0] + corner[1] > max: max", "Image type cv_bridge = CvBridge() cv2.WITH_QT = False # Get", "# draw left-middle, right-middle and center key-points rgb_img = cv2.circle(rgb_img,", "# get the current transformation from the camera to aruco", "dist] if __name__ == '__main__': # initialize ros node rospy.init_node(\"Static_grasping\")", "= (int(res[2]*f_w), int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2)) kp_lm_3d = project(kp_lm,", "int(corner[1])] if corner[0] + corner[1] > max: max = corner[0]", "from logger import Logger from utils.utils import AverageMeter from datasets.dataset_factory", "tvec_CL M_CL[3, :] = np.array([0, 0, 0, 1]) if visualize:", "int(kp_pr[3] * f_h)) if isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm, 640,", "to aruco tag :param cameraMatrix: camera intrinsic matrix :param depth_image:", "break if depth != 0: break nei_range += 1 pxl", "reverse=True) # select the top 1 grasp prediction within the", "4)) M_CL[:3, :3] = dst_CL M_CL[:3, 3] = tvec_CL M_CL[3,", "img.astype(np.uint8) depth = (depth_raw * 1000).astype(np.uint8) # get the current", "to convert ROS Image type to OpenCV Image type cv_bridge", "kps[1], kps[2], kps[3], score]) # no detection if len(kps_pr) ==", "np.pi - orientation elif orientation < -np.pi / 2: orientation", "= detector.run(inp_image[:, :, :]) ret = ret[\"results\"] loc_ori = KpsToGrasppose(ret,", "1]) if visualize: # print('aruco is located at mean position", "aruco.drawAxis(image_init, cameraMatrix, 
distCoeffs, rvec_CL, tvec_CL, markerLength_CL) return M_CL, corners_CL[0][0, :,", "import division from __future__ import print_function import _init_paths import os", "top-left and right-bottom corners min = sys.maxsize max = -sys.maxsize", "Detector opt = opts().parse() Dataset = dataset_factory[opt.dataset] opt = opts().update_dataset_info_and_set_heads(opt,", "+= 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] * depth, pixel[1] *", "cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0, 255), 2) if visualize:", "is located at mean position (%d, %d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init,", "into the network ret = detector.run(inp_image[:, :, :]) ret =", "width dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2]) # draw arrow for", "5.), int(kp_lm[1] + (kp_rm[1] - kp_lm[1]) / 5.)) rm_ep =", "1.00000]]) # default transformation from the camera to aruco tag", "the depth of the detected pixel is 0, check the", "pre_process(rgb_img, depth_img): inp_image = rgb_img inp_image[:, :, 0] = depth_img", "# by counter-clock wise nei_range = 1 while depth ==", "division from __future__ import print_function import _init_paths import os import", "606.30420, 240.91934], [0.0, 0.0, 1.0]]) # distortion of Realsense D435", "of perception result pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10) def get_M_CL_info(gray,", "with respect to base frame ''' depth = depth_image[pixel[1], pixel[0]]", "V1 :param rgb_data: RGB image :param depth_data: raw depth image", "M_BL, cameraMatrix): ''' project 2d pixel on the image to", "res = kp_pr break if res is None: return [0,", "pixel[0]] # if the depth of the detected pixel is", "rospy.init_node(\"Static_grasping\") # Bridge to convert ROS Image type to OpenCV", "if depth != 0: break if depth != 0: break", "default transformation from the camera to aruco tag default_M_CL =", "corner[1] > max: max = corner[0] + corner[1] br_pxl =", "[-0.8045912, 0.03027403, -0.59305689, 0.08434352], [ 0.58952768, -0.07926594, -0.8038495, 0.66103522], [", "= (int(kp_pr[0] * f_w), int(kp_pr[1] * f_h)) kp_rm = (int(kp_pr[2]", "tvec_CL, markerLength_CL) return M_CL, corners_CL[0][0, :, :] def aruco_tag_remove(rgb_image, corners):", "if the depth of the detected pixel is 0, check", "* f_w), int(kp_pr[1] * f_h)) kp_rm = (int(kp_pr[2] * f_w),", "= project(center, depth_map, M_CL, M_BL, cameraMatrix) orientation = np.arctan2(kp_rm_3d[1] -", "and right-middle key-points lm_ep = (int(kp_lm[0] + (kp_rm[0] - kp_lm[0])", "= rgb_img inp_image[:, :, 0] = depth_img inp_image = cv2.resize(inp_image,", "robot base to aruco tag M_BL = np.array([[1., 0., 0.,", "rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2) #", "# default transformation from the camera to aruco tag default_M_CL", "respect to base frame ''' depth = depth_image[pixel[1], pixel[0]] #", "return [0, 0, 0, 0] # sort by the confidence", "project(pixel, depth_image, M_CL, M_BL, cameraMatrix): ''' project 2d pixel on", "for the first frame, it may contain nothing if ids_CL", "# print('aruco is located at mean position (%d, %d)' %(mean_x", "preds: kps = pred[:4] score = pred[-1] kps_pr.append([kps[0], kps[1], kps[2],", "__future__ import division from __future__ import print_function import _init_paths import", "= cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0, 255), 2) if", "raw RGB and depth input from Kinect V1 :param rgb_data:", "from std_msgs.msg import Float64MultiArray from sensor_msgs.msg import Image, CameraInfo from", "M_BL = np.array([[1., 0., 0., 0.30000], 
[0., 1., 0., 0.32000],", "for corner in corners: if corner[0] + corner[1] < min:", "= (depth_raw * 1000).astype(np.uint8) # get the current transformation from", "depth of the detected pixel is 0, check the depth", "the depth channel inp_image = pre_process(img_wo_at, depth) # pass the", "left-middle, right-middle and center key-points rgb_img = cv2.circle(rgb_img, (int(kp_lm[0]), int(kp_lm[1])),", "detector_factory # transformation from the robot base to aruco tag", "min = sys.maxsize max = -sys.maxsize tl_pxl = None br_pxl", "image_init, visualize=False): # parameters markerLength_CL = 0.093 aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL)", "M_CL: trans from camera to aruco tag :param cameraMatrix: camera", ":return: None \"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth =", "# replace blue channel with the depth channel inp_image =", "= (int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) / 5.), int(kp_lm[1] +", "= sorted(kps_pr, key=lambda x: x[-1], reverse=True) # select the top", "type to OpenCV Image type cv_bridge = CvBridge() cv2.WITH_QT =", "q_L = np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return q_B def pre_process(rgb_img,", "def get_M_CL_info(gray, image_init, visualize=False): # parameters markerLength_CL = 0.093 aruco_dict_CL", "avoid mis-detection if corners is not None: img_wo_at = aruco_tag_remove(img,", "# no detection if len(kps_pr) == 0: return [0, 0,", "np.pi / 2: orientation = np.pi - orientation elif orientation", "= np.pi - orientation elif orientation < -np.pi / 2:", "Kinect V1 :param rgb_data: RGB image :param depth_data: raw depth", "select the top 1 grasp prediction within the workspace res", "(0, 0, 0), 2) rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0,", "= [] for category_id, preds in net_output.items(): if len(preds) ==", ":3] = dst_CL M_CL[:3, 3] = tvec_CL M_CL[3, :] =", "image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image) depth_sub = message_filters.Subscriber(\"/camera/depth_registered/image\", Image) ts =", "orientation > np.pi / 2: orientation = np.pi - orientation", "rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL, cameraMatrix, distCoeffs) dst_CL, jacobian_CL", "* depth, pixel[1] * depth, depth])) q_C = np.array([pxl[0], pxl[1],", "Image) depth_sub = message_filters.Subscriber(\"/camera/depth_registered/image\", Image) ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], 1,", "depth_data: raw depth image :return: None \"\"\" try: cv_rgb =", "message_filters import torch from external.nms import soft_nms from opts import", "= kp_pr break if res is None: return [0, 0,", "cv_rgb) img = cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8)", "pixel with respect to base frame ''' depth = depth_image[pixel[1],", "rep_color = img_out[tl_pxl[0] - 10, tl_pxl[1] - 10, :] for", "x, y = pxl[:] return w/12. 
<= x <= 11*w/12", "sensor_msgs.msg import Image, CameraInfo from cv_bridge import CvBridge, CvBridgeError import", "project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix) kp_rm_3d = project(kp_rm, depth_map, M_CL,", "orientation, dist] if __name__ == '__main__': # initialize ros node", "except CvBridgeError as e: print(e) def isWithinRange(pxl, w, h): x,", "- 45, br_pxl[1] + 46): for w in range(tl_pxl[0] -", "if __name__ == '__main__': # initialize ros node rospy.init_node(\"Static_grasping\") #", "soft_nms from opts import opts from logger import Logger from", "Realsense D435 distCoeffs = np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0]) #", "= aruco_tag_remove(img, corners) # replace blue channel with the depth", "inp_image def kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save raw RGB and depth", "q_C = np.array([pxl[0], pxl[1], pxl[2], 1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B", "orientation < -np.pi / 2: orientation = -np.pi - orientation", "= cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2) # draw", "depth channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image) depth_sub = message_filters.Subscriber(\"/camera/depth_registered/image\", Image)", "info :param pixel: x, y :param M_CL: trans from camera", "depth_map, M_CL, M_BL, cameraMatrix) center_3d = project(center, depth_map, M_CL, M_BL,", "corners_CL[0][0, :, :] def aruco_tag_remove(rgb_image, corners): img_out = rgb_image.copy() #", "lm_ep = (int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) / 5.), int(kp_lm[1]", "/ 5.)) rm_ep = (int(kp_rm[0] + (kp_lm[0] - kp_rm[0]) /", "aruco tag M_BL = np.array([[1., 0., 0., 0.30000], [0., 1.,", "cameraMatrix, distCoeffs) dst_CL, jacobian_CL = cv2.Rodrigues(rvec_CL) M_CL = np.zeros((4, 4))", "kp_lm_3d = project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix) kp_rm_3d = project(kp_rm,", "1): nei = [pixel[0] + delta_x, pixel[1] + delta_y] depth", "img, depth_raw, M_CL, M_BL, cameraMatrix) pub_res.publish(loc_ori) except CvBridgeError as e:", "frame ''' depth = depth_image[pixel[1], pixel[0]] # if the depth", "f_w, f_h = 640./512., 480./512. kp_lm = (int(res[0]*f_w), int(res[1]*f_h)) kp_rm", "channel inp_image = pre_process(img_wo_at, depth) # pass the image into", "img_out def project(pixel, depth_image, M_CL, M_BL, cameraMatrix): ''' project 2d", "for delta_x in range(-nei_range, nei_range + 1): for delta_y in", "intrinsic matrix :param depth_image: depth image :param depth_scale: depth scale", "kp_lm = (int(res[0]*f_w), int(res[1]*f_h)) kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center =", "rgb_data: RGB image :param depth_data: raw depth image :return: None", "< min: min = corner[0] + corner[1] tl_pxl = [int(corner[0]),", "the workspace res = None for kp_pr in kps_pr: f_w,", "= cv_rgb_arr.copy() depth_raw = cv_depth_arr.copy() gray = img.astype(np.uint8) depth =", "orientation elif orientation < -np.pi / 2: orientation = -np.pi", "= None for kp_pr in kps_pr: f_w, f_h = 640.", "640. / 512., 480. / 512. 
kp_lm = (int(kp_pr[0] *", "cv2.WITH_QT = False # Get camera calibration parameters cam_param =", "score kps_pr = sorted(kps_pr, key=lambda x: x[-1], reverse=True) # select", "/ 5.)) rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0),", "the confidence score kps_pr = sorted(kps_pr, key=lambda x: x[-1], reverse=True)", "f_w), int(kp_pr[1] * f_h)) kp_rm = (int(kp_pr[2] * f_w), int(kp_pr[3]", "255), 2) rgb_img = cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0,", "cv2.imshow('visual', rgb_img) return [center_3d[0], center_3d[1], center_3d[2], orientation, dist] if __name__", "inp_image = rgb_img inp_image[:, :, 0] = depth_img inp_image =", "kps = pred[:4] score = pred[-1] kps_pr.append([kps[0], kps[1], kps[2], kps[3],", "if len(kps_pr) == 0: return [0, 0, 0, 0] #", "by the confidence score kps_pr = sorted(kps_pr, key=lambda x: x[-1],", "0.58952768, -0.07926594, -0.8038495, 0.66103522], [ 0., 0., 0., 1. ]]", "visualize: cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE) cv2.imshow('visual', rgb_img) return [center_3d[0], center_3d[1], center_3d[2], orientation,", "480./512. kp_lm = (int(res[0]*f_w), int(res[1]*f_h)) kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center", "# Bridge to convert ROS Image type to OpenCV Image", "= np.array([0, 0, 0, 1]) if visualize: # print('aruco is", "inp_image[:, :, 0] = depth_img inp_image = cv2.resize(inp_image, (256, 256))", "for delta_y in range(-nei_range, nei_range + 1): nei = [pixel[0]", "for h in range(tl_pxl[1] - 45, br_pxl[1] + 46): for", "detectors.detector_factory import detector_factory # transformation from the robot base to", "lm_ep, (0, 0, 0), 2) rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep,", "parameters = aruco.DetectorParameters_create() corners_CL, ids_CL, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters)", "= [pixel[0] + delta_x, pixel[1] + delta_y] depth = depth_image[nei[1],", "0.0459293, -0.13825178], [-0.8045912, 0.03027403, -0.59305689, 0.08434352], [ 0.58952768, -0.07926594, -0.8038495,", "sort by the confidence score kps_pr = sorted(kps_pr, key=lambda x:", "= rgb_image.copy() # find the top-left and right-bottom corners min", "- 10, tl_pxl[1] - 10, :] for h in range(tl_pxl[1]", "detected pixel is 0, check the depth of its neighbors", "0, check the depth of its neighbors # by counter-clock", "x <= 11*w/12 and h/12. <= y <= 11*h/12 def", "ids_CL, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters) # for the first", "[0.0, 606.30420, 240.91934], [0.0, 0.0, 1.0]]) # distortion of Realsense", "pred[-1] kps_pr.append([kps[0], kps[1], kps[2], kps[3], score]) # no detection if", "/ 512. kp_lm = (int(kp_pr[0] * f_w), int(kp_pr[1] * f_h))", "kps_pr.append([kps[0], kps[1], kps[2], kps[3], score]) # no detection if len(kps_pr)", "import absolute_import from __future__ import division from __future__ import print_function", "rgb_img = cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0, 255), 2)", "np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) # motor 7 is", "in kps_pr: f_w, f_h = 640. / 512., 480. 
/", "kp_rm_3d[:2]) # draw arrow for left-middle and right-middle key-points lm_ep", "transformation from the robot base to aruco tag M_BL =", "0: for delta_x in range(-nei_range, nei_range + 1): for delta_y", "find the top-left and right-bottom corners min = sys.maxsize max", "x: x[-1], reverse=True) # select the top 1 grasp prediction", "center_3d[2], orientation, dist] if __name__ == '__main__': # initialize ros", "M_CL, M_BL, cameraMatrix): ''' project 2d pixel on the image", "aruco_tag_remove(img, corners) # replace blue channel with the depth channel", "GKNet Detector opt = opts().parse() Dataset = dataset_factory[opt.dataset] opt =", "f_h = 640./512., 480./512. kp_lm = (int(res[0]*f_w), int(res[1]*f_h)) kp_rm =", "depth) # pass the image into the network ret =", "= np.array([pxl[0], pxl[1], pxl[2], 1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B =", "3d coordinate of pixel with respect to base frame '''", "kps[3], score]) # no detection if len(kps_pr) == 0: return", "as np import sys import rospy from std_msgs.msg import Bool", "False # Get camera calibration parameters cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo,", "import print_function import _init_paths import os import json import cv2", "rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2, (0, 0, 255), 2)", "]] ) # camera intrinsic matrix of Realsense D435 cameraMatrix", "import dataset_factory from detectors.detector_factory import detector_factory # transformation from the", ") # camera intrinsic matrix of Realsense D435 cameraMatrix =", "dtype=np.uint8) cv_depth_arr = np.array(cv_depth, dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\",", "delta_y] depth = depth_image[nei[1], nei[0]] if depth != 0: break", "for pred in preds: kps = pred[:4] score = pred[-1]", "image :return: None \"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth", "replace blue channel with the depth channel inp_image = pre_process(img_wo_at,", "# find the top-left and right-bottom corners min = sys.maxsize", "* f_w), int(kp_pr[3] * f_h)) if isWithinRange(kp_lm, 640, 480) and", "- kp_lm_3d[0]) # motor 7 is clockwise if orientation >", "kp_rm[0]) / 5.), int(kp_rm[1] + (kp_lm[1] - kp_rm[1]) / 5.))", "range(-nei_range, nei_range + 1): nei = [pixel[0] + delta_x, pixel[1]", "max: max = corner[0] + corner[1] br_pxl = [int(corner[0]), int(corner[1])]", "10, tl_pxl[1] - 10, :] for h in range(tl_pxl[1] -", "(int(res[0]*f_w), int(res[1]*f_h)) kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2))", "visualize=False): # parameters markerLength_CL = 0.093 aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL) #", ":param pixel: x, y :param M_CL: trans from camera to", "= (int(res[0]*f_w), int(res[1]*f_h)) kp_rm = (int(res[2]*f_w), int(res[3]*f_h)) center = (int((kp_lm[0]+kp_rm[0])/2),", "from opts import opts from logger import Logger from utils.utils", ",mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL) return M_CL, corners_CL[0][0,", "rospy from std_msgs.msg import Bool from std_msgs.msg import Float64MultiArray from", "result pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10) def get_M_CL_info(gray, image_init, visualize=False):", "as aruco import numpy as np import sys import rospy", "M_CL, corners = get_M_CL_info(gray, img, False) # remove aruco tag", "import message_filters import torch from external.nms import soft_nms from opts", "-0.8038495, 
0.66103522], [ 0., 0., 0., 1. ]] ) #", "score = pred[-1] kps_pr.append([kps[0], kps[1], kps[2], kps[3], score]) # no", "h/12. <= y <= 11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL,", "0., 0., 0.30000], [0., 1., 0., 0.32000], [0., 0., 1.,", ":param depth_data: raw depth image :return: None \"\"\" try: cv_rgb", "RGB and depth input from Kinect V1 :param rgb_data: RGB", "range(tl_pxl[1] - 45, br_pxl[1] + 46): for w in range(tl_pxl[0]", "no detection if len(kps_pr) == 0: return [0, 0, 0,", "512., 480. / 512. kp_lm = (int(kp_pr[0] * f_w), int(kp_pr[1]", "to avoid mis-detection if corners is not None: img_wo_at =", "255), 2) if visualize: cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE) cv2.imshow('visual', rgb_img) return [center_3d[0],", "Bridge to convert ROS Image type to OpenCV Image type", "Subscribe to rgb and depth channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image)", "in range(-nei_range, nei_range + 1): nei = [pixel[0] + delta_x,", "M_CL, M_BL, cameraMatrix, visualize=True): kps_pr = [] for category_id, preds", ":] for h in range(tl_pxl[1] - 45, br_pxl[1] + 46):", "input image to avoid mis-detection if corners is not None:", "* depth, depth])) q_C = np.array([pxl[0], pxl[1], pxl[2], 1]) q_L", "camera intrinsic matrix of Realsense D435 cameraMatrix = np.array([[607.47165, 0.0,", "from input image to avoid mis-detection if corners is not", "the current transformation from the camera to aruco tag M_CL,", "!= 0: break if depth != 0: break nei_range +=", "depth scale that trans raw data to mm :return: q_B:", "- orientation elif orientation < -np.pi / 2: orientation =", "import AverageMeter from datasets.dataset_factory import dataset_factory from detectors.detector_factory import detector_factory", "[center_3d[0], center_3d[1], center_3d[2], orientation, dist] if __name__ == '__main__': #", "\"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr", "np.array([pxl[0], pxl[1], pxl[2], 1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L)", "corner in corners: if corner[0] + corner[1] < min: min", "channel image_sub = message_filters.Subscriber(\"/camera/rgb/image_rect_color\", Image) depth_sub = message_filters.Subscriber(\"/camera/depth_registered/image\", Image) ts", "if ids_CL is None: return default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL", "depth, pixel[1] * depth, depth])) q_C = np.array([pxl[0], pxl[1], pxl[2],", "from __future__ import division from __future__ import print_function import _init_paths", "cameraMatrix): ''' project 2d pixel on the image to 3d", "1): for delta_y in range(-nei_range, nei_range + 1): nei =", "depth = depth_image[pixel[1], pixel[0]] # if the depth of the", "= corner[0] + corner[1] br_pxl = [int(corner[0]), int(corner[1])] # get", "45, br_pxl[0] + 46): img_out[h, w, :] = rep_color return", "# initialize GKNet Detector opt = opts().parse() Dataset = dataset_factory[opt.dataset]", "depth_image[nei[1], nei[0]] if depth != 0: break if depth !=", "\"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\")", "current transformation from the camera to aruco tag M_CL, corners", "pre_process(img_wo_at, depth) # pass the image into the network ret", "break if res is None: return [0, 0, 0, 0]", "= cv2.Rodrigues(rvec_CL) M_CL = np.zeros((4, 4)) M_CL[:3, :3] = dst_CL", "return [0, 0, 0, 0] f_w, f_h = 640./512., 480./512.", "from camera to aruco tag :param cameraMatrix: camera 
intrinsic matrix", "= project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix) kp_rm_3d = project(kp_rm, depth_map,", "M_BL, cameraMatrix) center_3d = project(center, depth_map, M_CL, M_BL, cameraMatrix) orientation", "for w in range(tl_pxl[0] - 45, br_pxl[0] + 46): img_out[h,", "def pre_process(rgb_img, depth_img): inp_image = rgb_img inp_image[:, :, 0] =", "- kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) # motor 7 is clockwise", "5.)) rm_ep = (int(kp_rm[0] + (kp_lm[0] - kp_rm[0]) / 5.),", "distCoeffs, rvec_CL, tvec_CL, markerLength_CL) return M_CL, corners_CL[0][0, :, :] def", "clockwise if orientation > np.pi / 2: orientation = np.pi", "distCoeffs) dst_CL, jacobian_CL = cv2.Rodrigues(rvec_CL) M_CL = np.zeros((4, 4)) M_CL[:3,", ":param depth_scale: depth scale that trans raw data to mm", "10, :] for h in range(tl_pxl[1] - 45, br_pxl[1] +", "br_pxl[1] + 46): for w in range(tl_pxl[0] - 45, br_pxl[0]", "nei[0]] if depth != 0: break if depth != 0:", "np.array(cv_depth, dtype=np.float32) # cv_depth_arr = np.nan_to_num(cv_depth_arr) cv2.imshow(\"Depth\", cv_depth) cv2.imshow(\"RGB\", cv_rgb)", "f_w), int(kp_pr[3] * f_h)) if isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm,", "480): res = kp_pr break if res is None: return", "= depth_image[nei[1], nei[0]] if depth != 0: break if depth", "= None for corner in corners: if corner[0] + corner[1]", "q_B = M_BL.dot(q_L) return q_B def pre_process(rgb_img, depth_img): inp_image =", "right-middle key-points lm_ep = (int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) /", "delta_x, pixel[1] + delta_y] depth = depth_image[nei[1], nei[0]] if depth", "+ delta_y] depth = depth_image[nei[1], nei[0]] if depth != 0:", "0, 0] f_w, f_h = 640./512., 480./512. kp_lm = (int(res[0]*f_w),", "network ret = detector.run(inp_image[:, :, :]) ret = ret[\"results\"] loc_ori", "CameraInfo, timeout=None) # Subscribe to rgb and depth channel image_sub", "<filename>src/static_grasp_kt.py from __future__ import absolute_import from __future__ import division from", "Bool from std_msgs.msg import Float64MultiArray from sensor_msgs.msg import Image, CameraInfo", "= [int(corner[0]), int(corner[1])] # get the replacement pixel value rep_color", "distCoeffs = np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0]) # initialize GKNet", "depth_scale: depth scale that trans raw data to mm :return:", "# distortion of Realsense D435 distCoeffs = np.array([0.08847, -0.04283, 0.00134,", "= cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr = np.array(cv_rgb,", "None \"\"\" try: cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, \"bgr8\") cv_depth = cv_bridge.imgmsg_to_cv2(depth_data,", "os import json import cv2 import cv2.aruco as aruco import", "to aruco tag default_M_CL = np.array([[-0.07134498, -0.99639369, 0.0459293, -0.13825178], [-0.8045912,", "print('aruco is located at mean position (%d, %d)' %(mean_x ,mean_y))", "pxl[1], pxl[2], 1]) q_L = np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return", "def isWithinRange(pxl, w, h): x, y = pxl[:] return w/12.", "center_3d = project(center, depth_map, M_CL, M_BL, cameraMatrix) orientation = np.arctan2(kp_rm_3d[1]", "= Detector(opt) # Publisher of perception result pub_res = rospy.Publisher('/result',", "ids_CL is None: return default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL =", "(0, 0, 255), 2) if visualize: cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE) cv2.imshow('visual', rgb_img)", "= CvBridge() cv2.WITH_QT = False # Get camera calibration parameters", "import cv2 import cv2.aruco as aruco import 
numpy as np", "11*w/12 and h/12. <= y <= 11*h/12 def KpsToGrasppose(net_output, rgb_img,", "depth_image: depth image :param depth_scale: depth scale that trans raw", "aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL) # aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_6X6_250) parameters = aruco.DetectorParameters_create()", "mis-detection if corners is not None: img_wo_at = aruco_tag_remove(img, corners)", "arrow for left-middle and right-middle key-points lm_ep = (int(kp_lm[0] +", "corner[0] + corner[1] br_pxl = [int(corner[0]), int(corner[1])] # get the", "5.)) rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2)", "None: return default_M_CL, None rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL,", "= -np.pi - orientation else: orientation = -orientation # compute", "aruco tag M_CL, corners = get_M_CL_info(gray, img, False) # remove", "M_CL[:3, :3] = dst_CL M_CL[:3, 3] = tvec_CL M_CL[3, :]", "cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, \"32FC1\") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr =", "right-middle and center key-points rgb_img = cv2.circle(rgb_img, (int(kp_lm[0]), int(kp_lm[1])), 2,", "depth, depth])) q_C = np.array([pxl[0], pxl[1], pxl[2], 1]) q_L =", "cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr = np.array(cv_depth, dtype=np.float32) # cv_depth_arr", "ret[\"results\"] loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix) pub_res.publish(loc_ori)", "0.66103522], [ 0., 0., 0., 1. ]] ) # camera", "325.90064], [0.0, 606.30420, 240.91934], [0.0, 0.0, 1.0]]) # distortion of", "markerLength_CL = 0.093 aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL) # aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_6X6_250)", "int(center[1])), 2, (0, 0, 255), 2) if visualize: cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE)", "kp_rm_3d = project(kp_rm, depth_map, M_CL, M_BL, cameraMatrix) center_3d = project(center,", "= np.array([[1., 0., 0., 0.30000], [0., 1., 0., 0.32000], [0.,", "# compute the open width dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2])", "= tvec_CL M_CL[3, :] = np.array([0, 0, 0, 1]) if", "[ 0., 0., 0., 1. ]] ) # camera intrinsic", "depth input from Kinect V1 :param rgb_data: RGB image :param", "rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2) rgb_img", "0., 0., 1. 
]] ) # camera intrinsic matrix of", "sys.maxsize max = -sys.maxsize tl_pxl = None br_pxl = None", "import torch from external.nms import soft_nms from opts import opts", "aruco_tag_remove(rgb_image, corners): img_out = rgb_image.copy() # find the top-left and", "print(opt) Detector = detector_factory[opt.task] detector = Detector(opt) # Publisher of", "= pre_process(img_wo_at, depth) # pass the image into the network", "e: print(e) def isWithinRange(pxl, w, h): x, y = pxl[:]", "= np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0]) # initialize GKNet Detector", "logger import Logger from utils.utils import AverageMeter from datasets.dataset_factory import", "the open width dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2]) # draw", "tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL, cameraMatrix, distCoeffs) dst_CL, jacobian_CL =", "Publisher of perception result pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10) def", "max = corner[0] + corner[1] br_pxl = [int(corner[0]), int(corner[1])] #", "orientation = np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) # motor", "0., 1., -0.0450], [0., 0., 0., 1.00000]]) # default transformation", "matrix :param depth_image: depth image :param depth_scale: depth scale that", "counter-clock wise nei_range = 1 while depth == 0: for", "cv2.resize(inp_image, (256, 256)) return inp_image def kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save", "depth channel inp_image = pre_process(img_wo_at, depth) # pass the image", "+ (kp_lm[0] - kp_rm[0]) / 5.), int(kp_rm[1] + (kp_lm[1] -", "continue for pred in preds: kps = pred[:4] score =", "- kp_rm[0]) / 5.), int(kp_rm[1] + (kp_lm[1] - kp_rm[1]) /", "\"32FC1\") cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8) cv_depth_arr = np.array(cv_depth, dtype=np.float32) #", "0: break nei_range += 1 pxl = np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] *", "-0.07926594, -0.8038495, 0.66103522], [ 0., 0., 0., 1. ]] )", "Image type to OpenCV Image type cv_bridge = CvBridge() cv2.WITH_QT", "at mean position (%d, %d)' %(mean_x ,mean_y)) aruco.drawAxis(image_init, cameraMatrix, distCoeffs,", "pub_res.publish(loc_ori) except CvBridgeError as e: print(e) def isWithinRange(pxl, w, h):", "cameraMatrix) orientation = np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0]) #", ":param rgb_data: RGB image :param depth_data: raw depth image :return:", "# draw arrow for left-middle and right-middle key-points lm_ep =", "None br_pxl = None for corner in corners: if corner[0]", "import Image, CameraInfo from cv_bridge import CvBridge, CvBridgeError import message_filters", "= np.linalg.inv(M_CL).dot(q_C) q_B = M_BL.dot(q_L) return q_B def pre_process(rgb_img, depth_img):", "= (int(kp_pr[2] * f_w), int(kp_pr[3] * f_h)) if isWithinRange(kp_lm, 640,", "int(kp_rm[1])), 2, (0, 0, 255), 2) rgb_img = cv2.circle(rgb_img, (int(center[0]),", "CameraInfo from cv_bridge import CvBridge, CvBridgeError import message_filters import torch", "<= x <= 11*w/12 and h/12. 
<= y <= 11*h/12", "= pred[:4] score = pred[-1] kps_pr.append([kps[0], kps[1], kps[2], kps[3], score])", "+ (kp_rm[1] - kp_lm[1]) / 5.)) rm_ep = (int(kp_rm[0] +", "cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2) # draw left-middle,", "0, 1]) if visualize: # print('aruco is located at mean", "Float64MultiArray from sensor_msgs.msg import Image, CameraInfo from cv_bridge import CvBridge,", "by depth info :param pixel: x, y :param M_CL: trans", "raw data to mm :return: q_B: 3d coordinate of pixel", "kinect_rgbd_callback(rgb_data, depth_data): \"\"\" Save raw RGB and depth input from", "M_CL, M_BL, cameraMatrix) pub_res.publish(loc_ori) except CvBridgeError as e: print(e) def", "== 0: continue for pred in preds: kps = pred[:4]", "rospy.Publisher('/result', Float64MultiArray, queue_size=10) def get_M_CL_info(gray, image_init, visualize=False): # parameters markerLength_CL", "cameraMatrix: camera intrinsic matrix :param depth_image: depth image :param depth_scale:", "KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix) pub_res.publish(loc_ori) except CvBridgeError as", "y <= 11*h/12 def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL, cameraMatrix,", "np.linalg.inv(cameraMatrix).dot( np.array([pixel[0] * depth, pixel[1] * depth, depth])) q_C =", "(int(kp_lm[0]), int(kp_lm[1])), 2, (0, 0, 255), 2) rgb_img = cv2.circle(rgb_img,", "cv_bridge import CvBridge, CvBridgeError import message_filters import torch from external.nms", "(kp_rm[1] - kp_lm[1]) / 5.)) rm_ep = (int(kp_rm[0] + (kp_lm[0]", "int(kp_pr[1] * f_h)) kp_rm = (int(kp_pr[2] * f_w), int(kp_pr[3] *", "__future__ import absolute_import from __future__ import division from __future__ import", "dataset_factory from detectors.detector_factory import detector_factory # transformation from the robot", "+ 46): for w in range(tl_pxl[0] - 45, br_pxl[0] +", "M_CL[:3, 3] = tvec_CL M_CL[3, :] = np.array([0, 0, 0,", "kp_pr in kps_pr: f_w, f_h = 640. 
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import _init_paths

import os
import json
import cv2
import cv2.aruco as aruco
import numpy as np
import sys

import rospy
from std_msgs.msg import Bool
from std_msgs.msg import Float64MultiArray
from sensor_msgs.msg import Image, CameraInfo
from cv_bridge import CvBridge, CvBridgeError
import message_filters

import torch
from external.nms import soft_nms
from opts import opts
from logger import Logger
from utils.utils import AverageMeter
from datasets.dataset_factory import dataset_factory
from detectors.detector_factory import detector_factory

# transformation from the robot base to aruco tag
M_BL = np.array([[1., 0., 0., 0.30000],
                 [0., 1., 0., 0.32000],
                 [0., 0., 1., -0.0450],
                 [0., 0., 0., 1.00000]])

# default transformation from the camera to aruco tag
default_M_CL = np.array([[-0.07134498, -0.99639369,  0.0459293,  -0.13825178],
                         [-0.8045912,   0.03027403, -0.59305689,  0.08434352],
                         [ 0.58952768, -0.07926594, -0.8038495,   0.66103522],
                         [ 0.,          0.,          0.,          1.        ]])

# camera intrinsic matrix of Realsense D435
cameraMatrix = np.array([[607.47165, 0.0,       325.90064],
                         [0.0,       606.30420, 240.91934],
                         [0.0,       0.0,       1.0]])

# distortion of Realsense D435
distCoeffs = np.array([0.08847, -0.04283, 0.00134, -0.00102, 0.0])

# initialize GKNet Detector
opt = opts().parse()
Dataset = dataset_factory[opt.dataset]
opt = opts().update_dataset_info_and_set_heads(opt, Dataset)
print(opt)
Detector = detector_factory[opt.task]
detector = Detector(opt)

# Publisher of perception result
pub_res = rospy.Publisher('/result', Float64MultiArray, queue_size=10)


def get_M_CL_info(gray, image_init, visualize=False):
    # parameters
    markerLength_CL = 0.093
    aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_ARUCO_ORIGINAL)
    # aruco_dict_CL = aruco.Dictionary_get(aruco.DICT_6X6_250)
    parameters = aruco.DetectorParameters_create()

    corners_CL, ids_CL, rejectedImgPoints = aruco.detectMarkers(gray, aruco_dict_CL, parameters=parameters)

    # for the first frame, it may contain nothing
    if ids_CL is None:
        return default_M_CL, None

    rvec_CL, tvec_CL, _objPoints_CL = aruco.estimatePoseSingleMarkers(corners_CL[0], markerLength_CL,
                                                                      cameraMatrix, distCoeffs)
    dst_CL, jacobian_CL = cv2.Rodrigues(rvec_CL)
    M_CL = np.zeros((4, 4))
    M_CL[:3, :3] = dst_CL
    M_CL[:3, 3] = tvec_CL
    M_CL[3, :] = np.array([0, 0, 0, 1])

    if visualize:
        # print('aruco tag mean position (%d, %d)' % (mean_x, mean_y))
        aruco.drawAxis(image_init, cameraMatrix, distCoeffs, rvec_CL, tvec_CL, markerLength_CL)
    return M_CL, corners_CL[0][0, :, :]


def aruco_tag_remove(rgb_image, corners):
    img_out = rgb_image.copy()

    # find the top-left and right-bottom corners
    min = sys.maxsize
    max = -sys.maxsize
    tl_pxl = None
    br_pxl = None
    for corner in corners:
        if corner[0] + corner[1] < min:
            min = corner[0] + corner[1]
            tl_pxl = [int(corner[0]), int(corner[1])]
        if corner[0] + corner[1] > max:
            max = corner[0] + corner[1]
            br_pxl = [int(corner[0]), int(corner[1])]

    # get the replacement pixel value
    rep_color = img_out[tl_pxl[0] - 10, tl_pxl[1] - 10, :]

    for h in range(tl_pxl[1] - 45, br_pxl[1] + 46):
        for w in range(tl_pxl[0] - 45, br_pxl[0] + 46):
            img_out[h, w, :] = rep_color

    return img_out


def project(pixel, depth_image, M_CL, M_BL, cameraMatrix):
    '''
    project 2d pixel on the image to 3d by depth info
    :param pixel: x, y
    :param M_CL: trans from camera to aruco tag
    :param cameraMatrix: camera intrinsic matrix
    :param depth_image: depth image
    :param depth_scale: depth scale that trans raw data to mm
    :return:
        q_B: 3d coordinate of pixel with respect to base frame
    '''
    depth = depth_image[pixel[1], pixel[0]]

    # if the depth of the detected pixel is 0, check the depth of its neighbors
    # by counter-clock wise
    nei_range = 1
    while depth == 0:
        for delta_x in range(-nei_range, nei_range + 1):
            for delta_y in range(-nei_range, nei_range + 1):
                nei = [pixel[0] + delta_x, pixel[1] + delta_y]
                depth = depth_image[nei[1], nei[0]]
                if depth != 0:
                    break
            if depth != 0:
                break
        nei_range += 1

    pxl = np.linalg.inv(cameraMatrix).dot(
        np.array([pixel[0] * depth, pixel[1] * depth, depth]))
    q_C = np.array([pxl[0], pxl[1], pxl[2], 1])
    q_L = np.linalg.inv(M_CL).dot(q_C)
    q_B = M_BL.dot(q_L)

    return q_B


def pre_process(rgb_img, depth_img):
    inp_image = rgb_img
    inp_image[:, :, 0] = depth_img
    inp_image = cv2.resize(inp_image, (256, 256))
    return inp_image


def kinect_rgbd_callback(rgb_data, depth_data):
    """
    Save raw RGB and depth input from Kinect V1
    :param rgb_data: RGB image
    :param depth_data: raw depth image
    :return: None
    """
    try:
        cv_rgb = cv_bridge.imgmsg_to_cv2(rgb_data, "bgr8")
        cv_depth = cv_bridge.imgmsg_to_cv2(depth_data, "32FC1")

        cv_rgb_arr = np.array(cv_rgb, dtype=np.uint8)
        cv_depth_arr = np.array(cv_depth, dtype=np.float32)
        # cv_depth_arr = np.nan_to_num(cv_depth_arr)

        cv2.imshow("Depth", cv_depth)
        cv2.imshow("RGB", cv_rgb)

        img = cv_rgb_arr.copy()
        depth_raw = cv_depth_arr.copy()

        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        img = img.astype(np.uint8)
        depth = (depth_raw * 1000).astype(np.uint8)

        # get the current transformation from the camera to aruco tag
        M_CL, corners = get_M_CL_info(gray, img, False)

        # remove aruco tag from input image to avoid mis-detection
        if corners is not None:
            img_wo_at = aruco_tag_remove(img, corners)
        else:
            img_wo_at = img

        # replace blue channel with the depth channel
        inp_image = pre_process(img_wo_at, depth)

        # pass the image into the network
        ret = detector.run(inp_image[:, :, :])
        ret = ret["results"]

        loc_ori = KpsToGrasppose(ret, img, depth_raw, M_CL, M_BL, cameraMatrix)
        pub_res.publish(loc_ori)
    except CvBridgeError as e:
        print(e)


def isWithinRange(pxl, w, h):
    x, y = pxl[:]
    return w/12. <= x <= 11*w/12 and h/12. <= y <= 11*h/12


def KpsToGrasppose(net_output, rgb_img, depth_map, M_CL, M_BL, cameraMatrix, visualize=True):
    kps_pr = []
    for category_id, preds in net_output.items():
        if len(preds) == 0:
            continue
        for pred in preds:
            kps = pred[:4]
            score = pred[-1]
            kps_pr.append([kps[0], kps[1], kps[2], kps[3], score])

    # no detection
    if len(kps_pr) == 0:
        return [0, 0, 0, 0]

    # sort by the confidence score
    kps_pr = sorted(kps_pr, key=lambda x: x[-1], reverse=True)

    # select the top 1 grasp prediction within the workspace
    res = None
    for kp_pr in kps_pr:
        f_w, f_h = 640. / 512., 480. / 512.
        kp_lm = (int(kp_pr[0] * f_w), int(kp_pr[1] * f_h))
        kp_rm = (int(kp_pr[2] * f_w), int(kp_pr[3] * f_h))
        if isWithinRange(kp_lm, 640, 480) and isWithinRange(kp_rm, 640, 480):
            res = kp_pr
            break
    if res is None:
        return [0, 0, 0, 0]

    f_w, f_h = 640./512., 480./512.
    kp_lm = (int(res[0]*f_w), int(res[1]*f_h))
    kp_rm = (int(res[2]*f_w), int(res[3]*f_h))
    center = (int((kp_lm[0]+kp_rm[0])/2), int((kp_lm[1]+kp_rm[1])/2))

    kp_lm_3d = project(kp_lm, depth_map, M_CL, M_BL, cameraMatrix)
    kp_rm_3d = project(kp_rm, depth_map, M_CL, M_BL, cameraMatrix)
    center_3d = project(center, depth_map, M_CL, M_BL, cameraMatrix)

    orientation = np.arctan2(kp_rm_3d[1] - kp_lm_3d[1], kp_rm_3d[0] - kp_lm_3d[0])
    # motor 7 is clockwise
    if orientation > np.pi / 2:
        orientation = np.pi - orientation
    elif orientation < -np.pi / 2:
        orientation = -np.pi - orientation
    else:
        orientation = -orientation

    # compute the open width
    dist = np.linalg.norm(kp_lm_3d[:2] - kp_rm_3d[:2])

    # draw arrow for left-middle and right-middle key-points
    lm_ep = (int(kp_lm[0] + (kp_rm[0] - kp_lm[0]) / 5.),
             int(kp_lm[1] + (kp_rm[1] - kp_lm[1]) / 5.))
    rm_ep = (int(kp_rm[0] + (kp_lm[0] - kp_rm[0]) / 5.),
             int(kp_rm[1] + (kp_lm[1] - kp_rm[1]) / 5.))
    rgb_img = cv2.arrowedLine(rgb_img, kp_lm, lm_ep, (0, 0, 0), 2)
    rgb_img = cv2.arrowedLine(rgb_img, kp_rm, rm_ep, (0, 0, 0), 2)
    # draw left-middle, right-middle and center key-points
    rgb_img = cv2.circle(rgb_img, (int(kp_lm[0]), int(kp_lm[1])), 2, (0, 0, 255), 2)
    rgb_img = cv2.circle(rgb_img, (int(kp_rm[0]), int(kp_rm[1])), 2, (0, 0, 255), 2)
    rgb_img = cv2.circle(rgb_img, (int(center[0]), int(center[1])), 2, (0, 0, 255), 2)

    if visualize:
        cv2.namedWindow('visual', cv2.WINDOW_AUTOSIZE)
        cv2.imshow('visual', rgb_img)

    return [center_3d[0], center_3d[1], center_3d[2], orientation, dist]


if __name__ == '__main__':
    # initialize ros node
    rospy.init_node("Static_grasping")

    # Bridge to convert ROS Image type to OpenCV Image type
    cv_bridge = CvBridge()
    cv2.WITH_QT = False

    # Get camera calibration parameters
    cam_param = rospy.wait_for_message('/camera/rgb/camera_info', CameraInfo, timeout=None)

    # Subscribe to rgb and depth channel
    image_sub = message_filters.Subscriber("/camera/rgb/image_rect_color", Image)
    depth_sub = message_filters.Subscriber("/camera/depth_registered/image", Image)
    ts = message_filters.ApproximateTimeSynchronizer([image_sub, depth_sub], 1, 0.1)
    ts.registerCallback(kinect_rgbd_callback)

    rospy.spin()
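The transform chain inside project() can be exercised in isolation: back-project the pixel through the intrinsics, lift it to homogeneous coordinates, map it from the camera frame into the tag frame with inv(M_CL), then into the robot base frame with M_BL. Below is a minimal numpy-only sketch of that chain; the matrices and the back_project helper name are illustrative placeholders, not the calibrated values or functions defined above.

import numpy as np

K = np.array([[600., 0., 320.],        # placeholder intrinsics, not the D435 calibration
              [0., 600., 240.],
              [0., 0., 1.]])
M_CL = np.eye(4)                        # placeholder camera -> tag transform
M_BL = np.eye(4)                        # placeholder base -> tag transform

def back_project(u, v, depth):
    # pixel -> camera frame: inv(K) @ [u*d, v*d, d]
    p_C = np.linalg.inv(K).dot(np.array([u * depth, v * depth, depth]))
    q_C = np.append(p_C, 1.0)           # homogeneous coordinates
    q_L = np.linalg.inv(M_CL).dot(q_C)  # camera frame -> tag frame
    return M_BL.dot(q_L)                # tag frame -> base frame

print(back_project(320, 240, 0.5))      # principal point at 0.5 m depth -> [0. 0. 0.5 1.]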
[ "0, 255).astype(dtype) lut_val = np.clip(x * r[2], 0, 255).astype(dtype) im_hsv", "augment_params.get('adjust_brightness') else None, 'invert' : RandAugment.invert if augment_params.get('invert') else None,", "RandAugment.contrast if augment_params.get('contrast') else None, 'shearX' : RandAugment.shear_x if augment_params.get('shearX')", "= np.array([[1, level, 0], [0, 1 , 0], [0, 0", "v) # print(self.policy) def mixup(img1,img2,factor): img = img1.astype('float')* factor +", "cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR) return RandAugment.mixup(img,degenerate,factor) def shear_x(img,level=0.4,mode='reflect'): M = np.array([[1, level,", "= np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255) if isinstance(img_size,int): img_size = (img_size,img_size) return", "if augment_params.get('cutout') else None, 'rotate' : RandAugment.rotate if augment_params.get('rotate') else", "M, (width, height), borderMode=mode) return translate_img # def sharpness(img,): #", "if augment_params.get('shearX') else None, 'shearY' : RandAugment.shear_y if augment_params.get('shearY') else", "else None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None, 'invert' :", "level degenerate = np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor) return img def", "np.array([[1, level, 0], [0, 1 , 0], [0, 0 ,", "augment_params.get('shearX') else None, 'shearY' : RandAugment.shear_y if augment_params.get('shearY') else None,", "random def preprocess(img,img_size,padding=True): \"\"\"[summary] Args: img (np.ndarray): images img_size (int,list,tuple):", "img.astype('uint8') return img def augment_fliplr(img,level): if random.random() < level: return", "# print(augmenter) return img def augmentation_test(): img_org = cv2.imread('test.jpg') import", "tileGridSize=(8, 8)) yuv[:, :, 0] = c.apply(yuv[:, :, 0]) else:", "= option_mode[mode] sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return", "return np.where(image <= threshold, image, 255 - image) def posterize(img,", "np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor) return img def invert(img,level=None): return 255-img", "delta = height - width if delta > 0: img", "return img2 def cut_25_above(img,level=0.25): ratio = level height,width,_ = img.shape", "width if delta > 0: img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255)", "def cut_25_under(img,level=0.25): ratio = level height,width,_ = img.shape new_height =", "2, h / 2) # Perform the rotation M =", "1, 1]], dtype=tf.float32, # shape=[3, 3, 1, 1]) / 13.", "mode = option_mode[mode] sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)", "target size \"\"\" if padding: height,width,_ = img.shape delta =", "[[1, 1, 1], # [1, 5, 1], # [1, 1,", "self.ARGS_LIMIT[augmenter] level = min_arg + (max_arg - min_arg) * level", ":, 0] = c.apply(yuv[:, :, 0]) else: yuv[:, :, 0]", "# [1, 5, 1], # [1, 1, 1]], dtype=tf.float32, #", "vgain: r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain]", "= np.random.randint(0,width-padding_size[1]) img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] = 255 return img def rotate(image, level=45,", "# kernel = np.array( # [[1, 1, 1], # [1,", "1 , 0], [0, 0 , 1]],dtype='float') translate_img = cv2.warpPerspective(img,", "{ 'fliplr' : augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' : 
augment_params.get('hist_equalize'), 'solarize'", "/ 13. # cv2. def cutout(img,level,**kwargs): img = img.copy() height,width", "None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None, 'invert' : RandAugment.invert", "'translateX' : augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'), 'sharpness' : augment_params.get('sharpness'), 'cutout'", "augment_params['num_layers'] self.AUGMENT_FUNCTION = { 'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr') else", "augment_params.get('hist_equalize'), 'solarize' : augment_params.get('solarize'), 'posterize': augment_params.get('posterize'), 'adjust_brightness': augment_params.get('adjust_brightness'), 'invert' :", "option_mode[mode] sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return sheared_img", "= np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_right(img,level=0.25): ratio", "else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def", "augmenters = random.choices(self.policy, k=self.num_layers) for augmenter in augmenters: level =", "cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram return cv2.cvtColor(yuv,", "cv2.COLOR_YUV2RGB) # convert YUV image to RGB def solarize(image, level=128):", "# def sharpness(img,): # kernel = np.array( # [[1, 1,", "'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above') else None, 'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under')", "return RandAugment.mixup(img,degenerate,factor) def shear_x(img,level=0.4,mode='reflect'): M = np.array([[1, level, 0], [0,", "augment_params.get('fliplr') else None, 'augment_hsv' : RandAugment.augment_hsv if augment_params.get('augment_hsv') else None,", "lut_hue = ((x * r[0]) % 180).astype(dtype) lut_sat = np.clip(x", "borderMode=mode) return sheared_img def shear_y(img,level=0.4,mode='reflect'): M = np.array([[1, 0 ,", "else None, 'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else None, # 'random_crop':random_crop", "else None, 'invert' : RandAugment.invert if augment_params.get('invert') else None, 'contrast':", "np.array([[1, 0 , 0], [level, 1 , 0], [0, 0", "1]],dtype='float') translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return translate_img", "'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' : augment_params.get('hist_equalize'), 'solarize' : augment_params.get('solarize'), 'posterize': augment_params.get('posterize'),", "= cv2.warpPerspective(img, M, (width, height), borderMode=mode) return sheared_img def translate_x(img,level,mode='reflect'):", "= img.shape new_width = int((1-ratio)*width) img_ = img[:,:new_width,:] height,width,_ =", "from image distortion. Defaults to True. 
Returns: images (np.ndarray): images", "= level height,width,_ = img.shape new_width = int(ratio*width) img_ =", "for k,v in self.AUGMENT_FUNCTION.items() if v) # print(self.policy) def mixup(img1,img2,factor):", "lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) lut_val = np.clip(x", "= np.array( # [[1, 1, 1], # [1, 5, 1],", "+ (max_arg - min_arg) * level img = self.AUGMENT_FUNCTION[augmenter](img,level=level) #", "= cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val))) cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im)", "'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr') else None, 'augment_hsv' : RandAugment.augment_hsv", "in target size \"\"\" if padding: height,width,_ = img.shape delta", "return img2 def __call__(self,img): augmenters = random.choices(self.policy, k=self.num_layers) for augmenter", "try: min_arg,max_arg = self.ARGS_LIMIT[augmenter] level = min_arg + (max_arg -", "def preprocess(img,img_size,padding=True): \"\"\"[summary] Args: img (np.ndarray): images img_size (int,list,tuple): target", "* (1-factor) img = np.clip(img, 0,255) img = img.astype('uint8') return", "img = np.right_shift(img,shift) return img.astype('uint8') def adjust_brightness(img,level=0.5): factor = level", "resize. Prevent from image distortion. Defaults to True. Returns: images", "center = (w / 2, h / 2) # Perform", "None, 'translateY' : RandAugment.translate_y if augment_params.get('translateY') else None, 'sharpness' :", "self.AUGMENT_FUNCTION = { 'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr') else None,", "augment_params.get('translateY') else None, 'sharpness' : RandAugment.sharpness if augment_params.get('sharpness') else None,", "= { 'fliplr' : augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' : augment_params.get('hist_equalize'),", "np.array([[1, 0 , 0], [level, 1 , translate_pixel], [0, 0", "if augment_params.get('rotate') else None, 'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left') else", "= img.shape delta = height - width if delta >", "= int(height*level),int(width*level) value = kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0]) cordinate_w =", ": augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' : augment_params.get('hist_equalize'), 'solarize' : augment_params.get('solarize'),", "= kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0]) cordinate_w = np.random.randint(0,width-padding_size[1]) img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] =", "'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None, 'invert' : RandAugment.invert if", "augment_params.get('translateX') else None, 'translateY' : RandAugment.translate_y if augment_params.get('translateY') else None,", "= cv2.warpPerspective(img, M, (width, height), borderMode=mode) return translate_img # def", "cut_25_left(img,level=0.25): ratio = level height,width,_ = img.shape new_width = int(ratio*width)", "degenerate = np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor) return img def invert(img,level=None):", "images in target size \"\"\" if padding: height,width,_ = img.shape", "img2 def cut_25_left(img,level=0.25): ratio = level height,width,_ = img.shape new_width", "cv2.COLOR_RGB2YUV) if clahe: c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) yuv[:, :,", "None, 'shearX' : 
RandAugment.shear_x if augment_params.get('shearX') else None, 'shearY' :", "= np.right_shift(img,shift) return img.astype('uint8') def adjust_brightness(img,level=0.5): factor = level degenerate", "0], [0, 1 , 0], [0, 0 , 1]],dtype='float') height,width,_", "M, (width, height), borderMode=mode) return translate_img def translate_y(img,level,mode='reflect'): height,width,_ =", "height), borderMode=mode) return sheared_img def translate_x(img,level,mode='reflect'): height,width,_ = img.shape option_mode", "[level, 1 , translate_pixel], [0, 0 , 1]],dtype='float') translate_img =", "= level height,width,_ = img.shape new_width = int((1-ratio)*width) img_ =", "augment_params.get('augment_hsv'), 'hist_equalize' : augment_params.get('hist_equalize'), 'solarize' : augment_params.get('solarize'), 'posterize': augment_params.get('posterize'), 'adjust_brightness':", "None, 'augment_hsv' : RandAugment.augment_hsv if augment_params.get('augment_hsv') else None, 'hist_equalize' :", "return sheared_img def shear_y(img,level=0.4,mode='reflect'): M = np.array([[1, 0 , 0],", "= img[new_height:,:,:] height,width,_ = img_.shape if height > width :", "as np import cv2 import random def preprocess(img,img_size,padding=True): \"\"\"[summary] Args:", "np.clip(img, 0,255) img = img.astype('uint8') return img def augment_fliplr(img,level): if", "= cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram return", "if v) # print(self.policy) def mixup(img1,img2,factor): img = img1.astype('float')* factor", "img2 def cut_25_above(img,level=0.25): ratio = level height,width,_ = img.shape new_height", "yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV) if clahe:", "return img def augmentation_test(): img_org = cv2.imread('test.jpg') import yaml augment_params", "1], # [1, 1, 1]], dtype=tf.float32, # shape=[3, 3, 1,", "img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_above(img,level=0.25):", "0 , 0], [level, 1 , 0], [0, 0 ,", "None, 'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above') else None, 'cut_25_under': RandAugment.cut_25_under if", "= ((x * r[0]) % 180).astype(dtype) lut_sat = np.clip(x *", ": RandAugment.translate_x if augment_params.get('translateX') else None, 'translateY' : RandAugment.translate_y if", "= 8 - bits # img = img >> shift", "img = img >> shift img = np.left_shift(img,shift) img =", "level=45, center = None, scale = 1.0): angle=level (h, w)", "= img_.shape if height > width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255)", "subtract 255 from the pixel. 
return np.where(image <= threshold, image,", "0], [level, 1 , 0], [0, 0 , 1]],dtype='float') height,width,_", "augment_params.get('cutout') else None, 'rotate' : RandAugment.rotate if augment_params.get('rotate') else None,", "cv2.BORDER_CONSTANT } mode = option_mode[mode] translate_pixel = int(width * level)", "cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) dtype = im.dtype # uint8 x = np.arange(0,", "value = kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0]) cordinate_w = np.random.randint(0,width-padding_size[1]) img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:]", "= list(k for k,v in self.AUGMENT_FUNCTION.items() if v) # print(self.policy)", "min_arg + (max_arg - min_arg) * level img = self.AUGMENT_FUNCTION[augmenter](img,level=level)", "None, 'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else None, # 'random_crop':random_crop }", "RandAugment.posterize if augment_params.get('posterize') else None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else", "uint8 x = np.arange(0, 256, dtype=r.dtype) lut_hue = ((x *", "if augment_params.get('sharpness') else None, 'cutout' : RandAugment.cutout if augment_params.get('cutout') else", "= np.array([[1, 0 , translate_pixel], [level, 1 , 0], [0,", ", 1]],dtype='float') height,width,_ = img.shape option_mode ={ 'reflect' : cv2.BORDER_REPLICATE,", "(int,list,tuple): target size. eg: 224 , (224,224) or [224,224] padding", "else None, 'translateY' : RandAugment.translate_y if augment_params.get('translateY') else None, 'sharpness'", "180).astype(dtype) lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) lut_val =", "img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224))", "augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'), 'sharpness' : augment_params.get('sharpness'), 'cutout' : augment_params.get('cutout'),", "RandAugment.augment_hsv if augment_params.get('augment_hsv') else None, 'hist_equalize' : RandAugment.hist_equalize if augment_params.get('hist_equalize')", "width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2", "mixup(img1,img2,factor): img = img1.astype('float')* factor + img2.astype('float') * (1-factor) img", "return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV", "def __call__(self,img): augmenters = random.choices(self.policy, k=self.num_layers) for augmenter in augmenters:", "'fliplr' : augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' : augment_params.get('hist_equalize'), 'solarize' :", "augment_params.get('rotate'), 'cut_25_left' : augment_params.get('cut_25_left'), 'cut_25_right': augment_params.get('cut_25_right'), 'cut_25_above': augment_params.get('cut_25_above'), 'cut_25_under': augment_params.get('cut_25_under')", "'adjust_brightness': augment_params.get('adjust_brightness'), 'invert' : augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'),", "in self.AUGMENT_FUNCTION.items() if v) # print(self.policy) def mixup(img1,img2,factor): img =", 
"'solarize' : RandAugment.solarize if augment_params.get('solarize') else None, 'posterize': RandAugment.posterize if", "augment_params.get('cut_25_under') # 'random_crop':random_crop } self.policy = list(k for k,v in", "np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1 #", "0]) else: yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) #", "0, 255).astype(dtype) im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)))", ": RandAugment.augment_hsv if augment_params.get('augment_hsv') else None, 'hist_equalize' : RandAugment.hist_equalize if", "in the image, select the pixel # if the value", "height,width,_ = img.shape new_height = int(ratio*height) img_ = img[new_height:,:,:] height,width,_", "img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_right(img,level=0.25): ratio = level", "level height,width,_ = img.shape new_height = int(ratio*height) img_ = img[new_height:,:,:]", "cv2.BORDER_CONSTANT } mode = option_mode[mode] sheared_img = cv2.warpPerspective(img, M, (width,", "center = None, scale = 1.0): angle=level (h, w) =", "Equalize histogram on BGR image 'im' with im.shape(n,m,3) and range", "pixel. return np.where(image <= threshold, image, 255 - image) def", "0 , 1]],dtype='float') height,width,_ = img.shape option_mode ={ 'reflect' :", "eg: 224 , (224,224) or [224,224] padding (bool): padding img", "if augment_params.get('translateX') else None, 'translateY' : RandAugment.translate_y if augment_params.get('translateY') else", "cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert YUV image to", ", translate_pixel], [0, 0 , 1]],dtype='float') translate_img = cv2.warpPerspective(img, M,", "bits = level shift = 8 - bits # img", "else None, 'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above') else None, 'cut_25_under': RandAugment.cut_25_under", "img (np.ndarray): images img_size (int,list,tuple): target size. eg: 224 ,", "translate_pixel], [0, 0 , 1]],dtype='float') translate_img = cv2.warpPerspective(img, M, (width,", "self.ARGS_LIMIT = { 'fliplr' : augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'), 'hist_equalize' :", "than the threshold. # Otherwise, subtract 255 from the pixel.", "im.shape(n,m,3) and range 0-255 yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr", "the image, select the pixel # if the value is", "if height > width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2", "bgr=True): im = im.copy() # Equalize histogram on BGR image", "level=3): bits = level shift = 8 - bits #", "return img def augment_fliplr(img,level): if random.random() < level: return np.fliplr(img)", "img[new_height:,:,:] height,width,_ = img_.shape if height > width : img2", "RandAugment.cutout if augment_params.get('cutout') else None, 'rotate' : RandAugment.rotate if augment_params.get('rotate')", "image, select the pixel # if the value is less", "= int(ratio*height) img_ = img[new_height:,:,:] height,width,_ = img_.shape if height", "None, 'sharpness' : RandAugment.sharpness if augment_params.get('sharpness') else None, 'cutout' :", "Args: img (np.ndarray): images img_size (int,list,tuple): target size. 
eg: 224", "} mode = option_mode[mode] translate_pixel = int(width * level) M", "RandAugment.invert if augment_params.get('invert') else None, 'contrast': RandAugment.contrast if augment_params.get('contrast') else", "# cv2.imshow('a',img_org) # cv2.imshow('b',img_aug) # cv2.imshow('c',img_pad) # if cv2.waitKey(0)==ord('q'): #", "= augment_params['num_layers'] self.AUGMENT_FUNCTION = { 'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr')", "return translate_img # def sharpness(img,): # kernel = np.array( #", ", 1]],dtype='float') translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return", "\"\"\" if padding: height,width,_ = img.shape delta = height -", "M = np.array([[1, level, 0], [0, 1 , 0], [0,", "def shear_x(img,level=0.4,mode='reflect'): M = np.array([[1, level, 0], [0, 1 ,", "return img def rotate(image, level=45, center = None, scale =", "* level img = self.AUGMENT_FUNCTION[augmenter](img,level=level) # except: # print(augmenter) return", "= np.arange(0, 256, dtype=r.dtype) lut_hue = ((x * r[0]) %", "> width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255)", "isinstance(img_size,int): img_size = (img_size,img_size) return cv2.resize(img,img_size) class RandAugment: def __init__(self,", "import cv2 import random def preprocess(img,img_size,padding=True): \"\"\"[summary] Args: img (np.ndarray):", "target size. eg: 224 , (224,224) or [224,224] padding (bool):", "return img2 def cut_25_right(img,level=0.25): ratio = level height,width,_ = img.shape", "'rotate' : augment_params.get('rotate'), 'cut_25_left' : augment_params.get('cut_25_left'), 'cut_25_right': augment_params.get('cut_25_right'), 'cut_25_above': augment_params.get('cut_25_above'),", "= cv2.resize(img2,(224,224)) return img2 def cut_25_above(img,level=0.25): ratio = level height,width,_", "(224,224) or [224,224] padding (bool): padding img before resize. Prevent", "cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode = option_mode[mode] sheared_img =", "def adjust_brightness(img,level=0.5): factor = level degenerate = np.zeros(img.shape,dtype='uint8') img =", "= img[:,new_width:,:] height,width,_ = img_.shape if height > width :", "and range 0-255 yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else", "image to RGB def solarize(image, level=128): threshold = level image", ", 0], [level, 1 , translate_pixel], [0, 0 , 1]],dtype='float')", "= im.copy() # Equalize histogram on BGR image 'im' with", "ratio = level height,width,_ = img.shape new_width = int(ratio*width) img_", "> 0: img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else: img =", "1], # [1, 5, 1], # [1, 1, 1]], dtype=tf.float32,", "augment_params.get('cutout'), 'rotate' : augment_params.get('rotate'), 'cut_25_left' : augment_params.get('cut_25_left'), 'cut_25_right': augment_params.get('cut_25_right'), 'cut_25_above':", "/ 2, h / 2) # Perform the rotation M", "cv2.warpPerspective(img, M, (width, height), borderMode=mode) return translate_img # def sharpness(img,):", "(h, w) = image.shape[:2] if center is None: center =", ": RandAugment.augment_fliplr if augment_params.get('fliplr') else None, 'augment_hsv' : RandAugment.augment_hsv if", "np import cv2 import random def preprocess(img,img_size,padding=True): \"\"\"[summary] Args: img", "dtype=tf.float32, # shape=[3, 3, 1, 1]) / 13. 
# cv2.", "img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def __call__(self,img):", "if augment_params.get('posterize') else None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None,", "'reflect' : cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode = option_mode[mode]", "= np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_above(img,level=0.25): ratio", "int((1-ratio)*height) img_ = img[:new_height,:,:] height,width,_ = img_.shape if height >", "img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else: img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values", "= 255 return img def rotate(image, level=45, center = None,", "'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right') else None, 'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above')", "RandAugment.solarize if augment_params.get('solarize') else None, 'posterize': RandAugment.posterize if augment_params.get('posterize') else", "no return needed return im_hsv def hist_equalize(im, level=None,clahe=True, bgr=True): im", "cv2.COLOR_BGR2HSV)) dtype = im.dtype # uint8 x = np.arange(0, 256,", "'im' with im.shape(n,m,3) and range 0-255 yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV", "augment_params.get('rotate') else None, 'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left') else None,", "equalize Y channel histogram return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else", "= img1.astype('float')* factor + img2.astype('float') * (1-factor) img = np.clip(img,", "= np.clip(x * r[2], 0, 255).astype(dtype) im_hsv = cv2.merge((cv2.LUT(hue, lut_hue),", "angle=level (h, w) = image.shape[:2] if center is None: center", "= np.random.randint(0,height-padding_size[0]) cordinate_w = np.random.randint(0,width-padding_size[1]) img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] = 255 return img", "(np.ndarray): images in target size \"\"\" if padding: height,width,_ =", "= cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) dtype = im.dtype # uint8 x =", "translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return translate_img def", "3) * [hgain, sgain, vgain] + 1 # random gains", "=255) if isinstance(img_size,int): img_size = (img_size,img_size) return cv2.resize(img,img_size) class RandAugment:", "else None, 'hist_equalize' : RandAugment.hist_equalize if augment_params.get('hist_equalize') else None, 'solarize'", "% 180).astype(dtype) lut_sat = np.clip(x * r[1], 0, 255).astype(dtype) lut_val", "cv2.imshow('c',img_pad) # if cv2.waitKey(0)==ord('q'): # exit() if __name__ =='__main__': augmentation_test()", "(bool): padding img before resize. Prevent from image distortion. 
Defaults", "return translate_img def translate_y(img,level,mode='reflect'): height,width,_ = img.shape option_mode ={ 'reflect'", "img1.astype('float')* factor + img2.astype('float') * (1-factor) img = np.clip(img, 0,255)", "=255) else: img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255) if isinstance(img_size,int): img_size", "= cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV) if clahe: c", "# cv2.imshow('c',img_pad) # if cv2.waitKey(0)==ord('q'): # exit() if __name__ =='__main__':", ": cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode = option_mode[mode] translate_pixel", "= np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else: img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255)", "cv2.warpAffine(image, M, (w, h),borderMode=cv2.BORDER_REPLICATE) return rotated def cut_25_under(img,level=0.25): ratio =", "translate_img def translate_y(img,level,mode='reflect'): height,width,_ = img.shape option_mode ={ 'reflect' :", "gains hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) dtype = im.dtype", ": augment_params.get('sharpness'), 'cutout' : augment_params.get('cutout'), 'rotate' : augment_params.get('rotate'), 'cut_25_left' :", "r[1], 0, 255).astype(dtype) lut_val = np.clip(x * r[2], 0, 255).astype(dtype)", "height,width,_ = img.shape new_height = int((1-ratio)*height) img_ = img[:new_height,:,:] height,width,_", "= cv2.getRotationMatrix2D(center, angle, scale) rotated = cv2.warpAffine(image, M, (w, h),borderMode=cv2.BORDER_REPLICATE)", "img.shape padding_size = int(height*level),int(width*level) value = kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0])", "img.shape new_width = int((1-ratio)*width) img_ = img[:,:new_width,:] height,width,_ = img_.shape", "<reponame>dovietchinh/multi-task-classification<filename>source/utils/augmentations.py<gh_stars>0 import numpy as np import cv2 import random def", "# [1, 1, 1]], dtype=tf.float32, # shape=[3, 3, 1, 1])", "cv2.LUT(val, lut_val))) cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im) # no return needed return", "None, 'hist_equalize' : RandAugment.hist_equalize if augment_params.get('hist_equalize') else None, 'solarize' :", "np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else: img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255) if", "img_.shape if height > width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else:", "img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] = 255 return img def rotate(image, level=45, center =", "'rotate' : RandAugment.rotate if augment_params.get('rotate') else None, 'cut_25_left' : RandAugment.cut_25_left", "'constant' : cv2.BORDER_CONSTANT } mode = option_mode[mode] translate_pixel = int(width", "if augment_params.get('cut_25_left') else None, 'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right') else None,", "cv2.warpPerspective(img, M, (width, height), borderMode=mode) return translate_img def translate_y(img,level,mode='reflect'): height,width,_", "RandAugment.rotate if augment_params.get('rotate') else None, 'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left')", "img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_left(img,level=0.25): ratio = level", "1 , translate_pixel], [0, 0 , 1]],dtype='float') 
translate_img = cv2.warpPerspective(img,", "Returns: images (np.ndarray): images in target size \"\"\" if padding:", "'shearY' : augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'), 'sharpness'", "# except: # print(augmenter) return img def augmentation_test(): img_org =", "value is less than the threshold. # Otherwise, subtract 255", "None, 'posterize': RandAugment.posterize if augment_params.get('posterize') else None, 'adjust_brightness': RandAugment.adjust_brightness if", "cv2.COLOR_GRAY2BGR) return RandAugment.mixup(img,degenerate,factor) def shear_x(img,level=0.4,mode='reflect'): M = np.array([[1, level, 0],", "= cv2.warpPerspective(img, M, (width, height), borderMode=mode) return sheared_img def shear_y(img,level=0.4,mode='reflect'):", "color-space augmentation if hgain or sgain or vgain: r =", "[1, 5, 1], # [1, 1, 1]], dtype=tf.float32, # shape=[3,", "'solarize' : augment_params.get('solarize'), 'posterize': augment_params.get('posterize'), 'adjust_brightness': augment_params.get('adjust_brightness'), 'invert' : augment_params.get('invert'),", "0], [0, 0 , 1]],dtype='float') translate_img = cv2.warpPerspective(img, M, (width,", "= RandAugment.mixup(img,degenerate,factor) return img def invert(img,level=None): return 255-img def contrast(img,factor=0.5):", "= img.shape new_height = int(ratio*height) img_ = img[new_height:,:,:] height,width,_ =", "if augment_params.get('solarize') else None, 'posterize': RandAugment.posterize if augment_params.get('posterize') else None,", "if center is None: center = (w / 2, h", ", translate_pixel], [level, 1 , 0], [0, 0 , 1]],dtype='float')", "return img def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4): im =", ":, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel", ": augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY' :", "cv2.resize(img2,(224,224)) return img2 def cut_25_right(img,level=0.25): ratio = level height,width,_ =", "ratio = level height,width,_ = img.shape new_height = int(ratio*height) img_", "if delta > 0: img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else:", "1 # random gains hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV))", "int(height*level),int(width*level) value = kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0]) cordinate_w = np.random.randint(0,width-padding_size[1])", "is None: center = (w / 2, h / 2)", ": RandAugment.sharpness if augment_params.get('sharpness') else None, 'cutout' : RandAugment.cutout if", "= img.shape option_mode ={ 'reflect' : cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT", "np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def cut_25_right(img,level=0.25): ratio =", "'posterize': RandAugment.posterize if augment_params.get('posterize') else None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness')", "factor = level degenerate = np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor) return", "# 'random_crop':random_crop } self.policy = list(k for k,v in self.AUGMENT_FUNCTION.items()", "translate_pixel], [level, 1 , 0], [0, 0 , 1]],dtype='float') translate_img", "vgain=0.4): im = im.copy() # HSV color-space augmentation if hgain", "hgain or sgain or vgain: r = 
np.random.uniform(-1, 1, 3)", "padding img before resize. Prevent from image distortion. Defaults to", "# [[1, 1, 1], # [1, 5, 1], # [1,", "'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left') else None, 'cut_25_right': RandAugment.cut_25_right if", "255 - image) def posterize(img, level=3): bits = level shift", "scale = 1.0): angle=level (h, w) = image.shape[:2] if center", "= img.astype('uint8') return img def augment_fliplr(img,level): if random.random() < level:", "'cut_25_under': augment_params.get('cut_25_under') # 'random_crop':random_crop } self.policy = list(k for k,v", "shift img = np.left_shift(img,shift) img = np.right_shift(img,shift) return img.astype('uint8') def", "invert(img,level=None): return 255-img def contrast(img,factor=0.5): degenerate = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)", "(width, height), borderMode=mode) return sheared_img def translate_x(img,level,mode='reflect'): height,width,_ = img.shape", "padding_size = int(height*level),int(width*level) value = kwargs.get('value') cordinate_h = np.random.randint(0,height-padding_size[0]) cordinate_w", "img def invert(img,level=None): return 255-img def contrast(img,factor=0.5): degenerate = cv2.cvtColor(cv2.cvtColor(img,", "={ 'reflect' : cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode =", "img.shape option_mode ={ 'reflect' : cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT }", "'translateX' : RandAugment.translate_x if augment_params.get('translateX') else None, 'translateY' : RandAugment.translate_y", "# Otherwise, subtract 255 from the pixel. return np.where(image <=", "= int(width * level) M = np.array([[1, 0 , translate_pixel],", "height > width : img2 = np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 =", "x = np.arange(0, 256, dtype=r.dtype) lut_hue = ((x * r[0])", "np.clip(x * r[1], 0, 255).astype(dtype) lut_val = np.clip(x * r[2],", "height), borderMode=mode) return translate_img def translate_y(img,level,mode='reflect'): height,width,_ = img.shape option_mode", "M, (w, h),borderMode=cv2.BORDER_REPLICATE) return rotated def cut_25_under(img,level=0.25): ratio = level", "option_mode ={ 'reflect' : cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode", ": RandAugment.solarize if augment_params.get('solarize') else None, 'posterize': RandAugment.posterize if augment_params.get('posterize')", "augment_params = yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params') augmenter = RandAugment(augment_params=augment_params)#(num_layers=1) for _ in range(10000):", "[0, 1 , 0], [0, 0 , 1]],dtype='float') height,width,_ =", "images (np.ndarray): images in target size \"\"\" if padding: height,width,_", "<= threshold, image, 255 - image) def posterize(img, level=3): bits", "0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize Y channel histogram", "else None, 'shearY' : RandAugment.shear_y if augment_params.get('shearY') else None, 'translateX'", "borderMode=mode) return translate_img # def sharpness(img,): # kernel = np.array(", "if padding: height,width,_ = img.shape delta = height - width", "else None, 'solarize' : RandAugment.solarize if augment_params.get('solarize') else None, 'posterize':", "(np.ndarray): images img_size (int,list,tuple): target size. 
eg: 224 , (224,224)", "YUV image to RGB def solarize(image, level=128): threshold = level", "posterize(img, level=3): bits = level shift = 8 - bits", "cv2.BORDER_REPLICATE, 'constant' : cv2.BORDER_CONSTANT } mode = option_mode[mode] translate_pixel =", "* level) M = np.array([[1, 0 , translate_pixel], [level, 1", "# print(self.policy) def mixup(img1,img2,factor): img = img1.astype('float')* factor + img2.astype('float')", "rotated def cut_25_under(img,level=0.25): ratio = level height,width,_ = img.shape new_height", "range 0-255 yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV)", "= c.apply(yuv[:, :, 0]) else: yuv[:, :, 0] = cv2.equalizeHist(yuv[:,", ": augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'), 'sharpness' :", "= np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return", "= img.shape new_width = int(ratio*width) img_ = img[:,new_width:,:] height,width,_ =", "= { 'fliplr' : RandAugment.augment_fliplr if augment_params.get('fliplr') else None, 'augment_hsv'", "sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode) return sheared_img def", "augment_params.get('posterize') else None, 'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None, 'invert'", "print(augmenter) return img def augmentation_test(): img_org = cv2.imread('test.jpg') import yaml", "random gains hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV)) dtype =", "def cut_25_left(img,level=0.25): ratio = level height,width,_ = img.shape new_width =", "new_width = int(ratio*width) img_ = img[:,new_width:,:] height,width,_ = img_.shape if", "= np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1", "((x * r[0]) % 180).astype(dtype) lut_sat = np.clip(x * r[1],", "# img = img >> shift img = np.left_shift(img,shift) img", "= yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params') augmenter = RandAugment(augment_params=augment_params)#(num_layers=1) for _ in range(10000): img_aug", "'invert' : augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'), 'shearY' :", "else cv2.COLOR_RGB2YUV) if clahe: c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) yuv[:,", "augmentation_test(): img_org = cv2.imread('test.jpg') import yaml augment_params = yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params') augmenter", "RandAugment.shear_x if augment_params.get('shearX') else None, 'shearY' : RandAugment.shear_y if augment_params.get('shearY')", "height - width if delta > 0: img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]],", "img def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4): im = im.copy()", "For each pixel in the image, select the pixel #", "center is None: center = (w / 2, h /", "8 - bits # img = img >> shift img", "bits # img = img >> shift img = np.left_shift(img,shift)", "else None, 'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right') else None, 'cut_25_above': RandAugment.cut_25_above", "c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8)) yuv[:, :, 0] = c.apply(yuv[:,", "0] = c.apply(yuv[:, :, 0]) else: yuv[:, :, 0] =", "sheared_img def translate_x(img,level,mode='reflect'): height,width,_ = img.shape 
option_mode ={ 'reflect' :", "random.random() # try: min_arg,max_arg = self.ARGS_LIMIT[augmenter] level = min_arg +", "(w, h),borderMode=cv2.BORDER_REPLICATE) return rotated def cut_25_under(img,level=0.25): ratio = level height,width,_", "= int(width * level) M = np.array([[1, 0 , 0],", "'shearX' : augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY'", "sgain or vgain: r = np.random.uniform(-1, 1, 3) * [hgain,", "= np.clip(x * r[1], 0, 255).astype(dtype) lut_val = np.clip(x *", "'random_crop':random_crop } self.policy = list(k for k,v in self.AUGMENT_FUNCTION.items() if", "height,width,_ = img.shape option_mode ={ 'reflect' : cv2.BORDER_REPLICATE, 'constant' :", "level = min_arg + (max_arg - min_arg) * level img", "if bgr else cv2.COLOR_RGB2YUV) if clahe: c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8,", "= np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2 def __call__(self,img): augmenters", "img_aug = augmenter(img_org) img_pad = preprocess(img_aug,224) # cv2.imshow('a',img_org) # cv2.imshow('b',img_aug)", "= int((1-ratio)*width) img_ = img[:,:new_width,:] height,width,_ = img_.shape if height", "'random_crop':random_crop } self.ARGS_LIMIT = { 'fliplr' : augment_params.get('fliplr'), 'augment_hsv': augment_params.get('augment_hsv'),", "'cut_25_right': augment_params.get('cut_25_right'), 'cut_25_above': augment_params.get('cut_25_above'), 'cut_25_under': augment_params.get('cut_25_under') # 'random_crop':random_crop } self.policy", "[hgain, sgain, vgain] + 1 # random gains hue, sat,", "- bits # img = img >> shift img =", "def augment_fliplr(img,level): if random.random() < level: return np.fliplr(img) return img", "np.pad(img_,[[0,0],[(height-width)//2,(height-width)//2],[0,0]],mode='constant',constant_values=255) else: img2 = np.pad(img_,[[(width-height)//2,(width-height)//2],[0,0],[0,0]],mode='constant',constant_values=255) img2 = cv2.resize(img2,(224,224)) return img2", "height), borderMode=mode) return translate_img # def sharpness(img,): # kernel =", "# uint8 x = np.arange(0, 256, dtype=r.dtype) lut_hue = ((x", ": cv2.BORDER_CONSTANT } mode = option_mode[mode] sheared_img = cv2.warpPerspective(img, M,", "None, 'cut_25_left' : RandAugment.cut_25_left if augment_params.get('cut_25_left') else None, 'cut_25_right': RandAugment.cut_25_right", "(img_size,img_size) return cv2.resize(img,img_size) class RandAugment: def __init__(self, augment_params): self.num_layers =", "rotation M = cv2.getRotationMatrix2D(center, angle, scale) rotated = cv2.warpAffine(image, M,", "def __init__(self, augment_params): self.num_layers = augment_params['num_layers'] self.AUGMENT_FUNCTION = { 'fliplr'", "random.random() < level: return np.fliplr(img) return img def augment_hsv(im, level=None,", "def contrast(img,factor=0.5): degenerate = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR) return RandAugment.mixup(img,degenerate,factor) def", "im.dtype # uint8 x = np.arange(0, 256, dtype=r.dtype) lut_hue =", "0: img = np.pad(img,[[0,0],[delta//2,delta//2],[0,0]], mode='constant',constant_values =255) else: img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]],", "else: yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0]) # equalize", "augment_params.get('sharpness'), 'cutout' : augment_params.get('cutout'), 'rotate' : augment_params.get('rotate'), 'cut_25_left' : augment_params.get('cut_25_left'),", 
"self.AUGMENT_FUNCTION.items() if v) # print(self.policy) def mixup(img1,img2,factor): img = img1.astype('float')*", ">> shift img = np.left_shift(img,shift) img = np.right_shift(img,shift) return img.astype('uint8')", "__call__(self,img): augmenters = random.choices(self.policy, k=self.num_layers) for augmenter in augmenters: level", "np.where(image <= threshold, image, 255 - image) def posterize(img, level=3):", "in augmenters: level = random.random() # try: min_arg,max_arg = self.ARGS_LIMIT[augmenter]", "vgain] + 1 # random gains hue, sat, val =", "histogram return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB) # convert", "# if the value is less than the threshold. #", "if augment_params.get('contrast') else None, 'shearX' : RandAugment.shear_x if augment_params.get('shearX') else", "img >> shift img = np.left_shift(img,shift) img = np.right_shift(img,shift) return", "0 , 0], [level, 1 , translate_pixel], [0, 0 ,", "def augmentation_test(): img_org = cv2.imread('test.jpg') import yaml augment_params = yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params')", "size. eg: 224 , (224,224) or [224,224] padding (bool): padding", "np.fliplr(img) return img def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4): im", "if augment_params.get('translateY') else None, 'sharpness' : RandAugment.sharpness if augment_params.get('sharpness') else", "random.choices(self.policy, k=self.num_layers) for augmenter in augmenters: level = random.random() #", "int(ratio*height) img_ = img[new_height:,:,:] height,width,_ = img_.shape if height >", "Y channel histogram return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB)", "the pixel # if the value is less than the", ":, 0]) # equalize Y channel histogram return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR", "return np.fliplr(img) return img def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4):", "im = im.copy() # HSV color-space augmentation if hgain or", "RandAugment.mixup(img,degenerate,factor) def shear_x(img,level=0.4,mode='reflect'): M = np.array([[1, level, 0], [0, 1", "w) = image.shape[:2] if center is None: center = (w", "img2 def cut_25_right(img,level=0.25): ratio = level height,width,_ = img.shape new_width", "= self.ARGS_LIMIT[augmenter] level = min_arg + (max_arg - min_arg) *", "augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'),", "M, (width, height), borderMode=mode) return sheared_img def translate_x(img,level,mode='reflect'): height,width,_ =", "kernel = np.array( # [[1, 1, 1], # [1, 5,", "= int(ratio*width) img_ = img[:,new_width:,:] height,width,_ = img_.shape if height", "in range(10000): img_aug = augmenter(img_org) img_pad = preprocess(img_aug,224) # cv2.imshow('a',img_org)", "if augment_params.get('cut_25_above') else None, 'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else None,", "def cut_25_above(img,level=0.25): ratio = level height,width,_ = img.shape new_height =", "def cut_25_right(img,level=0.25): ratio = level height,width,_ = img.shape new_width =", "img = np.pad(img,[[-delta//2,-delta//2],[0,0],[0,0]], mode='constant',constant_values =255) if isinstance(img_size,int): img_size = (img_size,img_size)", "distortion. Defaults to True. 
class RandAugment:
    def __init__(self, augment_params):
        self.num_layers = augment_params['num_layers']
        # Map each augmentation name to its function; an entry survives only if
        # its key is enabled (truthy) in augment_params. Note that the methods
        # below are written without `self` and are used as plain functions.
        self.AUGMENT_FUNCTION = {
            'fliplr': RandAugment.augment_fliplr if augment_params.get('fliplr') else None,
            'augment_hsv': RandAugment.augment_hsv if augment_params.get('augment_hsv') else None,
            'hist_equalize': RandAugment.hist_equalize if augment_params.get('hist_equalize') else None,
            'solarize': RandAugment.solarize if augment_params.get('solarize') else None,
            'posterize': RandAugment.posterize if augment_params.get('posterize') else None,
            'adjust_brightness': RandAugment.adjust_brightness if augment_params.get('adjust_brightness') else None,
            'invert': RandAugment.invert if augment_params.get('invert') else None,
            'contrast': RandAugment.contrast if augment_params.get('contrast') else None,
            'shearX': RandAugment.shear_x if augment_params.get('shearX') else None,
            'shearY': RandAugment.shear_y if augment_params.get('shearY') else None,
            'translateX': RandAugment.translate_x if augment_params.get('translateX') else None,
            'translateY': RandAugment.translate_y if augment_params.get('translateY') else None,
            # 'sharpness' is listed here but its implementation is commented out
            # below, so enabling it in the config would raise AttributeError.
            'sharpness': RandAugment.sharpness if augment_params.get('sharpness') else None,
            'cutout': RandAugment.cutout if augment_params.get('cutout') else None,
            'rotate': RandAugment.rotate if augment_params.get('rotate') else None,
            'cut_25_left': RandAugment.cut_25_left if augment_params.get('cut_25_left') else None,
            'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right') else None,
            'cut_25_above': RandAugment.cut_25_above if augment_params.get('cut_25_above') else None,
            'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else None,
            # 'random_crop': random_crop
        }
        # Each enabled entry holds the (min, max) magnitude range that
        # __call__ samples a level from.
        self.ARGS_LIMIT = {
            'fliplr': augment_params.get('fliplr'),
            'augment_hsv': augment_params.get('augment_hsv'),
            'hist_equalize': augment_params.get('hist_equalize'),
            'solarize': augment_params.get('solarize'),
            'posterize': augment_params.get('posterize'),
            'adjust_brightness': augment_params.get('adjust_brightness'),
            'invert': augment_params.get('invert'),
            'contrast': augment_params.get('contrast'),
            'shearX': augment_params.get('shearX'),
            'shearY': augment_params.get('shearY'),
            'translateX': augment_params.get('translateX'),
            'translateY': augment_params.get('translateY'),
            'sharpness': augment_params.get('sharpness'),
            'cutout': augment_params.get('cutout'),
            'rotate': augment_params.get('rotate'),
            'cut_25_left': augment_params.get('cut_25_left'),
            'cut_25_right': augment_params.get('cut_25_right'),
            'cut_25_above': augment_params.get('cut_25_above'),
            'cut_25_under': augment_params.get('cut_25_under')
            # 'random_crop': random_crop
        }
        self.policy = list(k for k, v in self.AUGMENT_FUNCTION.items() if v)
        # print(self.policy)

    def mixup(img1, img2, factor):
        # Blend two images: factor=1 keeps img1, factor=0 keeps img2.
        img = img1.astype('float') * factor + img2.astype('float') * (1 - factor)
        img = np.clip(img, 0, 255)
        img = img.astype('uint8')
        return img
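    # A quick numeric check of mixup (a hypothetical helper, not in the
    # original class): with factor=0.5 every output pixel is the average
    # of the two inputs.
    def _mixup_example():
        a = np.full((2, 2, 3), 200, dtype='uint8')
        b = np.full((2, 2, 3), 100, dtype='uint8')
        return RandAugment.mixup(a, b, 0.5)  # every pixel becomes 150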
    def augment_fliplr(img, level):
        # `level` acts as the flip probability.
        if random.random() < level:
            return np.fliplr(img)
        return img

    def augment_hsv(im, level=None, hgain=0.015, sgain=0.7, vgain=0.4):
        im = im.copy()
        # HSV color-space augmentation
        if hgain or sgain or vgain:
            r = np.random.uniform(-1, 1, 3) * [hgain, sgain, vgain] + 1  # random gains
            hue, sat, val = cv2.split(cv2.cvtColor(im, cv2.COLOR_BGR2HSV))
            dtype = im.dtype  # uint8
            x = np.arange(0, 256, dtype=r.dtype)
            lut_hue = ((x * r[0]) % 180).astype(dtype)
            lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)
            lut_val = np.clip(x * r[2], 0, 255).astype(dtype)
            im_hsv = cv2.merge((cv2.LUT(hue, lut_hue), cv2.LUT(sat, lut_sat), cv2.LUT(val, lut_val)))
            cv2.cvtColor(im_hsv, cv2.COLOR_HSV2BGR, dst=im)  # writes the BGR result back into `im`
        # Return the BGR image. (The original returned the intermediate HSV
        # buffer, which is undefined when all gains are zero and would leave
        # the result in the wrong color space.)
        return im

    def hist_equalize(im, level=None, clahe=True, bgr=True):
        im = im.copy()
        # Equalize histogram on BGR image 'im' with im.shape(n,m,3) and range 0-255
        yuv = cv2.cvtColor(im, cv2.COLOR_BGR2YUV if bgr else cv2.COLOR_RGB2YUV)
        if clahe:
            c = cv2.createCLAHE(clipLimit=2.0, tileGridSize=(8, 8))
            yuv[:, :, 0] = c.apply(yuv[:, :, 0])
        else:
            yuv[:, :, 0] = cv2.equalizeHist(yuv[:, :, 0])  # equalize Y channel histogram
        return cv2.cvtColor(yuv, cv2.COLOR_YUV2BGR if bgr else cv2.COLOR_YUV2RGB)  # convert YUV image to RGB
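    # A small sketch (hypothetical) of the LUT trick used in augment_hsv:
    # rather than multiplying every pixel, a 256-entry lookup table is built
    # once and applied with cv2.LUT, which is much faster on uint8 channels.
    def _lut_example():
        channel = np.array([[0, 128, 255]], dtype='uint8')
        lut = np.clip(np.arange(0, 256) * 1.2, 0, 255).astype('uint8')  # gain of 1.2
        return cv2.LUT(channel, lut)  # -> [[0, 153, 255]]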
    def solarize(image, level=128):
        threshold = level
        image = image.copy()
        # For each pixel in the image, keep the pixel if its value is at most
        # the threshold; otherwise invert it (255 - pixel).
        return np.where(image <= threshold, image, 255 - image)

    def posterize(img, level=3):
        bits = level
        shift = 8 - bits
        # img = img >> shift
        # Note: on uint8 input the left shift wraps around, so this keeps the
        # LOW `bits` bits of each pixel (PIL's posterize keeps the high bits).
        img = np.left_shift(img, shift)
        img = np.right_shift(img, shift)
        return img.astype('uint8')

    def adjust_brightness(img, level=0.5):
        factor = level
        degenerate = np.zeros(img.shape, dtype='uint8')
        img = RandAugment.mixup(img, degenerate, factor)  # blend toward black
        return img

    def invert(img, level=None):
        return 255 - img

    def contrast(img, factor=0.5):
        # Blend toward a per-pixel grayscale copy (this mainly reduces color
        # saturation; PIL's Contrast blends toward the mean gray instead).
        degenerate = cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
        return RandAugment.mixup(img, degenerate, factor)
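    # A worked example of solarize (hypothetical, for illustration) with the
    # default threshold of 128: values at or below 128 pass through, values
    # above it are inverted.
    def _solarize_example():
        px = np.array([10, 128, 200], dtype='uint8')
        return RandAugment.solarize(px)  # -> [10, 128, 55] since 255 - 200 = 55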
    def shear_x(img, level=0.4, mode='reflect'):
        M = np.array([[1, level, 0],
                      [0, 1, 0],
                      [0, 0, 1]], dtype='float')
        height, width, _ = img.shape
        option_mode = {'reflect': cv2.BORDER_REPLICATE, 'constant': cv2.BORDER_CONSTANT}
        mode = option_mode[mode]
        sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
        return sheared_img

    def shear_y(img, level=0.4, mode='reflect'):
        M = np.array([[1, 0, 0],
                      [level, 1, 0],
                      [0, 0, 1]], dtype='float')
        height, width, _ = img.shape
        option_mode = {'reflect': cv2.BORDER_REPLICATE, 'constant': cv2.BORDER_CONSTANT}
        mode = option_mode[mode]
        sheared_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
        return sheared_img

    def translate_x(img, level, mode='reflect'):
        height, width, _ = img.shape
        option_mode = {'reflect': cv2.BORDER_REPLICATE, 'constant': cv2.BORDER_CONSTANT}
        mode = option_mode[mode]
        translate_pixel = int(width * level)
        # Pure horizontal translation. (The original matrix carried a stray
        # `level` shear term, apparently copy-pasted from shear_y; it is
        # dropped here.)
        M = np.array([[1, 0, translate_pixel],
                      [0, 1, 0],
                      [0, 0, 1]], dtype='float')
        translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
        return translate_img

    def translate_y(img, level, mode='reflect'):
        height, width, _ = img.shape
        option_mode = {'reflect': cv2.BORDER_REPLICATE, 'constant': cv2.BORDER_CONSTANT}
        mode = option_mode[mode]
        # Scale the vertical offset by the height (the original scaled it by
        # width) and likewise drop the stray shear term.
        translate_pixel = int(height * level)
        M = np.array([[1, 0, 0],
                      [0, 1, translate_pixel],
                      [0, 0, 1]], dtype='float')
        translate_img = cv2.warpPerspective(img, M, (width, height), borderMode=mode)
        return translate_img

    # def sharpness(img,):
    #     kernel = np.array(
    #         [[1, 1, 1],
    #          [1, 5, 1],
    #          [1, 1, 1]], dtype=tf.float32, shape=[3, 3, 1, 1]) / 13.
    #     cv2.
    # (Unfinished; see the note in AUGMENT_FUNCTION above.)

    def cutout(img, level, **kwargs):
        img = img.copy()
        height, width, _ = img.shape
        padding_size = int(height * level), int(width * level)
        value = kwargs.get('value')  # currently unused; the box is filled with white
        cordinate_h = np.random.randint(0, height - padding_size[0])
        cordinate_w = np.random.randint(0, width - padding_size[1])
        img[cordinate_h:cordinate_h + padding_size[0],
            cordinate_w:cordinate_w + padding_size[1], :] = 255
        return img

    def rotate(image, level=45, center=None, scale=1.0):
        angle = level
        (h, w) = image.shape[:2]
        if center is None:
            center = (w / 2, h / 2)
        # Perform the rotation
        M = cv2.getRotationMatrix2D(center, angle, scale)
        rotated = cv2.warpAffine(image, M, (w, h), borderMode=cv2.BORDER_REPLICATE)
        return rotated
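    # A hypothetical sanity check for translate_x: with level=0.1 on a
    # 200-pixel-wide image, content shifts right by int(200 * 0.1) = 20 px.
    def _translate_example():
        img = np.zeros((100, 200, 3), dtype='uint8')
        img[:, 0, :] = 255                    # white column at x=0
        out = RandAugment.translate_x(img, 0.1)
        return out[:, 20, :]                  # the column lands at x=20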
    def cut_25_under(img, level=0.25):
        ratio = level
        height, width, _ = img.shape
        new_height = int((1 - ratio) * height)
        img_ = img[:new_height, :, :]  # drop the bottom `ratio` of the rows
        height, width, _ = img_.shape
        if height > width:
            img2 = np.pad(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]],
                          mode='constant', constant_values=255)
        else:
            img2 = np.pad(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]],
                          mode='constant', constant_values=255)
        img2 = cv2.resize(img2, (224, 224))
        return img2

    def cut_25_above(img, level=0.25):
        ratio = level
        height, width, _ = img.shape
        new_height = int(ratio * height)
        img_ = img[new_height:, :, :]  # drop the top `ratio` of the rows
        height, width, _ = img_.shape
        if height > width:
            img2 = np.pad(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]],
                          mode='constant', constant_values=255)
        else:
            img2 = np.pad(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]],
                          mode='constant', constant_values=255)
        img2 = cv2.resize(img2, (224, 224))
        return img2

    def cut_25_left(img, level=0.25):
        ratio = level
        height, width, _ = img.shape
        new_width = int(ratio * width)
        img_ = img[:, new_width:, :]  # drop the left `ratio` of the columns
        height, width, _ = img_.shape
        if height > width:
            img2 = np.pad(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]],
                          mode='constant', constant_values=255)
        else:
            img2 = np.pad(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]],
                          mode='constant', constant_values=255)
        img2 = cv2.resize(img2, (224, 224))
        return img2

    def cut_25_right(img, level=0.25):
        ratio = level
        height, width, _ = img.shape
        new_width = int((1 - ratio) * width)
        img_ = img[:, :new_width, :]  # drop the right `ratio` of the columns
        height, width, _ = img_.shape
        if height > width:
            img2 = np.pad(img_, [[0, 0], [(height - width) // 2, (height - width) // 2], [0, 0]],
                          mode='constant', constant_values=255)
        else:
            img2 = np.pad(img_, [[(width - height) // 2, (width - height) // 2], [0, 0], [0, 0]],
                          mode='constant', constant_values=255)
        img2 = cv2.resize(img2, (224, 224))
        return img2
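    # The four cut_25_* helpers crop roughly a quarter off one side, re-pad
    # the result square with white, and resize to 224x224. A hypothetical
    # shape check for cut_25_under:
    def _cut_example():
        img = np.zeros((100, 100, 3), dtype='uint8')
        out = RandAugment.cut_25_under(img, 0.25)  # keeps the top 75 rows
        assert out.shape == (224, 224, 3)
        return out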
Returns:", "None, 'contrast': RandAugment.contrast if augment_params.get('contrast') else None, 'shearX' : RandAugment.shear_x", "def rotate(image, level=45, center = None, scale = 1.0): angle=level", ": RandAugment.invert if augment_params.get('invert') else None, 'contrast': RandAugment.contrast if augment_params.get('contrast')", "RandAugment.cut_25_above if augment_params.get('cut_25_above') else None, 'cut_25_under': RandAugment.cut_25_under if augment_params.get('cut_25_under') else", "else None, # 'random_crop':random_crop } self.ARGS_LIMIT = { 'fliplr' :", "augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'), 'translateY' : augment_params.get('translateY'), 'sharpness' : augment_params.get('sharpness'),", "threshold, image, 255 - image) def posterize(img, level=3): bits =", "sharpness(img,): # kernel = np.array( # [[1, 1, 1], #", "augment_params.get('posterize'), 'adjust_brightness': augment_params.get('adjust_brightness'), 'invert' : augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' :", "1.0): angle=level (h, w) = image.shape[:2] if center is None:", "= cv2.imread('test.jpg') import yaml augment_params = yaml.safe_load(open('config/default/train_config.yaml')).get('augment_params') augmenter = RandAugment(augment_params=augment_params)#(num_layers=1)", "= height - width if delta > 0: img =", "= cv2.cvtColor(cv2.cvtColor(img, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR) return RandAugment.mixup(img,degenerate,factor) def shear_x(img,level=0.4,mode='reflect'): M =", "# try: min_arg,max_arg = self.ARGS_LIMIT[augmenter] level = min_arg + (max_arg", "img_ = img[:,new_width:,:] height,width,_ = img_.shape if height > width", "13. # cv2. def cutout(img,level,**kwargs): img = img.copy() height,width ,_", "= img[:,:new_width,:] height,width,_ = img_.shape if height > width :", "< level: return np.fliplr(img) return img def augment_hsv(im, level=None, hgain=0.015,", "= np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor) return img def invert(img,level=None): return", "or [224,224] padding (bool): padding img before resize. Prevent from", ": RandAugment.cut_25_left if augment_params.get('cut_25_left') else None, 'cut_25_right': RandAugment.cut_25_right if augment_params.get('cut_25_right')", "self.AUGMENT_FUNCTION[augmenter](img,level=level) # except: # print(augmenter) return img def augmentation_test(): img_org", "= (w / 2, h / 2) # Perform the", "augment_params.get('contrast') else None, 'shearX' : RandAugment.shear_x if augment_params.get('shearX') else None,", "augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'), 'translateX' : augment_params.get('translateX'),", "borderMode=mode) return translate_img def translate_y(img,level,mode='reflect'): height,width,_ = img.shape option_mode ={", "r[0]) % 180).astype(dtype) lut_sat = np.clip(x * r[1], 0, 255).astype(dtype)", "image = image.copy() # For each pixel in the image,", "augment_params.get('invert') else None, 'contrast': RandAugment.contrast if augment_params.get('contrast') else None, 'shearX'", "'shearX' : RandAugment.shear_x if augment_params.get('shearX') else None, 'shearY' : RandAugment.shear_y", "shear_y(img,level=0.4,mode='reflect'): M = np.array([[1, 0 , 0], [level, 1 ,", "M = np.array([[1, 0 , 0], [level, 1 , translate_pixel],", "= cv2.resize(img2,(224,224)) return img2 def cut_25_left(img,level=0.25): ratio = level height,width,_", "the pixel. 
return np.where(image <= threshold, image, 255 - image)", "augment_params.get('cut_25_right'), 'cut_25_above': augment_params.get('cut_25_above'), 'cut_25_under': augment_params.get('cut_25_under') # 'random_crop':random_crop } self.policy =", "'cutout' : RandAugment.cutout if augment_params.get('cutout') else None, 'rotate' : RandAugment.rotate", "augment_params.get('adjust_brightness'), 'invert' : augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'), 'shearY'", "augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'), 'translateX'", "else None, 'contrast': RandAugment.contrast if augment_params.get('contrast') else None, 'shearX' :", "(width, height), borderMode=mode) return translate_img def translate_y(img,level,mode='reflect'): height,width,_ = img.shape", "img def augmentation_test(): img_org = cv2.imread('test.jpg') import yaml augment_params =", ": augment_params.get('invert'), 'contrast': augment_params.get('contrast'), 'shearX' : augment_params.get('shearX'), 'shearY' : augment_params.get('shearY'),", "ratio = level height,width,_ = img.shape new_height = int((1-ratio)*height) img_", "level) M = np.array([[1, 0 , 0], [level, 1 ,", "on BGR image 'im' with im.shape(n,m,3) and range 0-255 yuv", "adjust_brightness(img,level=0.5): factor = level degenerate = np.zeros(img.shape,dtype='uint8') img = RandAugment.mixup(img,degenerate,factor)", "return rotated def cut_25_under(img,level=0.25): ratio = level height,width,_ = img.shape", "int(width * level) M = np.array([[1, 0 , 0], [level,", "img = np.left_shift(img,shift) img = np.right_shift(img,shift) return img.astype('uint8') def adjust_brightness(img,level=0.5):", "np.random.randint(0,width-padding_size[1]) img[cordinate_h:cordinate_h+padding_size[0],cordinate_w:cordinate_w+padding_size[1],:] = 255 return img def rotate(image, level=45, center", "[0, 0 , 1]],dtype='float') translate_img = cv2.warpPerspective(img, M, (width, height),", "return img def invert(img,level=None): return 255-img def contrast(img,factor=0.5): degenerate =", "translate_y(img,level,mode='reflect'): height,width,_ = img.shape option_mode ={ 'reflect' : cv2.BORDER_REPLICATE, 'constant'", "level height,width,_ = img.shape new_width = int(ratio*width) img_ = img[:,new_width:,:]", "solarize(image, level=128): threshold = level image = image.copy() # For", "'cutout' : augment_params.get('cutout'), 'rotate' : augment_params.get('rotate'), 'cut_25_left' : augment_params.get('cut_25_left'), 'cut_25_right':" ]
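
# The functions above repeatedly call RandAugment.mixup, but its definition
# does not survive in this dump. Below is a minimal sketch of the usual linear
# blend, assuming factor in [0, 1] weights the first image; the name, the
# signature, and the staticmethod wiring are reconstructions, not the original.
def mixup(img1, img2, factor):
    # factor=1 keeps img1 unchanged, factor=0 returns img2 (assumed semantics,
    # consistent with adjust_brightness blending toward an all-black image).
    out = img1.astype(np.float32) * factor + img2.astype(np.float32) * (1.0 - factor)
    return np.clip(out, 0, 255).astype(np.uint8)

RandAugment.mixup = staticmethod(mixup)  # hypothetical attachment point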
[ "return self.FileName class CapsulePayload(CapsuleData): '''Generate payload file, the header is", "UpdateHardwareInstance; //Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName", "self.Certificate_Guid = None self.MonotonicCount = None self.Existed = False self.Buffer", "= '0x0' ImageFileSize = os.path.getsize(self.ImageFile) if AuthData: # the ImageFileSize", "# @param self The object pointer def GenCapsuleSubItem(self): pass ##", "ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) ) if AuthData: Buffer += pack('QIHH',", "The constructor # # @param self The object pointer def", "generate capsule # # Copyright (c) 2007-2017, Intel Corporation. All", "FFS capsule data # # @param self The object pointer", "def __init__(self): self.UiName = None self.Version = None self.ImageTypeId =", "def GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys():", "capsule data # # class CapsuleFfs (CapsuleData): ## The constructor", "VendorFile = open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read() VendorFile.close() self.Existed =", "int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16),", "IS DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS IS\"", "if not self.ImageIndex: self.ImageIndex = '0x1' if not self.HardwareInstance: self.HardwareInstance", "GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from struct import pack import", "VendorFileSize = 0 if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) # #", "pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2], 16),", "not self.Version: self.Version = '0x00000002' if not self.ImageIndex: self.ImageIndex =", "EXPRESS OR IMPLIED. # ## # Import Modules # import", "object pointer # def __init__(self) : self.Ffs = None self.FileName", "self.FvName.find('.fv') == -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper())", "constructor # # @param self The object pointer # def", "FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return FdFile else: FdFile", "generate AnyFile capsule data # # @param self The object", "http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE BSD", "CapsuleAnyFile (CapsuleData): ## The constructor # # @param self The", "def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return FfsFile ## FV class", "self.Ffs = None self.FileName = None ## generate AnyFile capsule", "GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj", "# are licensed and made available under the terms and", "__init__(self) : self.Ffs = None self.FdName = None self.CapsuleName =", "THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN", ": self.Ffs = None self.FileName = None ## generate AnyFile", "defined below: #pragma pack(1) typedef struct { UINT32 Version; EFI_GUID", "for capsule data # # class CapsuleFd (CapsuleData): ## The", "License # which accompanies this distribution. The full text of", "the ImageFileSize need include the full authenticated info size. 
From", "self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return", "struct { UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3];", "import uuid ## base class for capsule data # #", "None self.Existed = False self.Buffer = None def GenCapsuleSubItem(self, AuthData=[]):", "under the terms and conditions of the BSD License #", "name # def GenCapsuleSubItem(self): return self.FileName ## Afile class for", "ImageFileSize += 32 VendorFileSize = 0 if self.VendorCodeFile: VendorFileSize =", "GenCapsuleSubItem(self, AuthData=[]): if not self.Version: self.Version = '0x00000002' if not", "None FdBuffer.close() return FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile", "None self.FdName = None self.CapsuleName = None ## generate FD", "# # class CapsuleFv (CapsuleData): ## The constructor # #", "ImageFileSize need include the full authenticated info size. From first", "self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName", "BSD LICENSE ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES", "data # # class CapsuleAnyFile (CapsuleData): ## The constructor #", "# # Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR>", "# class CapsuleData: ## The constructor # # @param self", "made available under the terms and conditions of the BSD", "AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # # Append", "reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in v2", "FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close() return FvFile else:", "UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in", "16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4],", "This program and the accompanying materials # are licensed and", "license may be found at # http://opensource.org/licenses/bsd-license.php # # THE", "None self.FvName = None self.CapsuleName = None ## generate FV", "if AuthData: # the ImageFileSize need include the full authenticated", "Guid = self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1],", "this distribution. The full text of the license may be", "payload file, the header is defined below: #pragma pack(1) typedef", "self.FileName = None ## generate Afile capsule data # #", "} EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName = None self.Version =", "ImageFile.close() if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read()", "# class CapsuleAnyFile (CapsuleData): ## The constructor # # @param", "for capsule data # # class CapsuleAfile (CapsuleData): ## The", "EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize;", "from Common.Misc import SaveFileOnChange import uuid ## base class for", "self.ImageIndex: self.ImageIndex = '0x1' if not self.HardwareInstance: self.HardwareInstance = '0x0'", "ANY KIND, EITHER EXPRESS OR IMPLIED. 
# ## # Import", "self.Ffs = None self.FileName = None ## generate Afile capsule", "//Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName =", "self.HardwareInstance = '0x0' ImageFileSize = os.path.getsize(self.ImageFile) if AuthData: # the", "Generated file name # def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return", "accompanies this distribution. The full text of the license may", "object pointer def __init__(self): pass ## generate capsule data #", "# import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from", "# generate capsule # # Copyright (c) 2007-2017, Intel Corporation.", "= FdObj.GenFd() return FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile", "UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced", "open(self.ImageFile, 'rb') Buffer += ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile =", "constructor # # @param self The object pointer def __init__(self):", "FdFile ## AnyFile class for capsule data # # class", "Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # # Append file content to the", "= open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read() VendorFile.close() self.Existed = True", "from struct import pack import os from Common.Misc import SaveFileOnChange", "0 if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) # # Fill structure", "CapsuleData: ## The constructor # # @param self The object", "## @file # generate capsule # # Copyright (c) 2007-2017,", "if AuthData: Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer", "self.ImageTypeId = None self.ImageIndex = None self.HardwareInstance = None self.ImageFile", "OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ## #", "not self.ImageIndex: self.ImageIndex = '0x1' if not self.HardwareInstance: self.HardwareInstance =", "of the license may be found at # http://opensource.org/licenses/bsd-license.php #", "Import Modules # import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import", "generate FFS capsule data # # @param self The object", "__init__(self) : self.Ffs = None self.FvName = None ## generate", "FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD class", "UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64", "pointer # def __init__(self) : self.Ffs = None self.FileName =", "bytes of MonotonicCount to last bytes of certificate. # the", "'0x0' ImageFileSize = os.path.getsize(self.ImageFile) if AuthData: # the ImageFileSize need", "'rb') Buffer += VendorFile.read() VendorFile.close() self.Existed = True return Buffer", "= '0x1' if not self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize =", "self.FvName = None self.CapsuleName = None ## generate FV capsule", "THE BSD LICENSE ON AN \"AS IS\" BASIS, # WITHOUT", "None ## generate FV capsule data # # @param self", "## # Import Modules # import Ffs from GenFdsGlobalVariable import", "typedef struct { UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8", "conditions of the BSD License # which accompanies this distribution.", "self.CapsuleName = None ## generate FV capsule data # #", "the full authenticated info size. 
From first bytes of MonotonicCount", "16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0,", "self.Ffs.GenFfs() return FfsFile ## FV class for capsule data #", "uuid.UUID(AuthData[4]).get_bytes_le() # # Append file content to the structure #", "GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return FfsFile ## FV class for", "the BSD License # which accompanies this distribution. The full", "in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName = None", "= [] self.Certificate_Guid = None self.MonotonicCount = None self.Existed =", "EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName = None self.Version = None", "[] self.VendorCodeFile = [] self.Certificate_Guid = None self.MonotonicCount = None", "text of the license may be found at # http://opensource.org/licenses/bsd-license.php", "None self.ImageFile = [] self.VendorCodeFile = [] self.Certificate_Guid = None", "data # # class CapsuleFd (CapsuleData): ## The constructor #", "# which accompanies this distribution. The full text of the", "-1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile =", "# Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR> #", "header is defined below: #pragma pack(1) typedef struct { UINT32", "materials # are licensed and made available under the terms", "= StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName =", "self.HardwareInstance = None self.ImageFile = [] self.VendorCodeFile = [] self.Certificate_Guid", "= None ## generate FFS capsule data # # @param", "## generate AnyFile capsule data # # @param self The", "available under the terms and conditions of the BSD License", "FFS class for capsule data # # class CapsuleFfs (CapsuleData):", "pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # #", "string Generated file name # def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs()", "None def GenCapsuleSubItem(self, AuthData=[]): if not self.Version: self.Version = '0x00000002'", "Generated file name # def GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1:", "bit is the MonotonicCount, dwLength, wRevision, wCertificateType and CertType ImageFileSize", "AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF", "last bytes of certificate. 
# the 32 bit is the", "self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16), int(Guid[2],", "# This program and the accompanying materials # are licensed", "BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER", "The object pointer # @retval string Generated file name #", "if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('')", "self.Version = '0x00000002' if not self.ImageIndex: self.ImageIndex = '0x1' if", "self The object pointer # def __init__(self) : self.Ffs =", "int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0, 0, 0, ImageFileSize,", "for capsule data # # class CapsuleAnyFile (CapsuleData): ## The", "name # def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return FfsFile ##", "= None ## generate FD capsule data # # @param", "self.Ffs = None self.FvName = None ## generate FFS capsule", "file content to the structure # ImageFile = open(self.ImageFile, 'rb')", "capsule data # # @param self The object pointer def", "= '0x00000002' if not self.ImageIndex: self.ImageIndex = '0x1' if not", "__init__(self) : self.Ffs = None self.FvName = None self.CapsuleName =", "the MonotonicCount, dwLength, wRevision, wCertificateType and CertType ImageFileSize += 32", "CapsuleFv (CapsuleData): ## The constructor # # @param self The", "None ## generate AnyFile capsule data # # @param self", "generate capsule data # # @param self The object pointer", "def GenCapsuleSubItem(self): return self.FileName ## Afile class for capsule data", "if self.FvName.find('.fv') == -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj =", "not self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize = os.path.getsize(self.ImageFile) if AuthData:", "# the 32 bit is the MonotonicCount, dwLength, wRevision, wCertificateType", "content to the structure # ImageFile = open(self.ImageFile, 'rb') Buffer", "== -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer", "# Append file content to the structure # ImageFile =", "class for capsule data # # class CapsuleFd (CapsuleData): ##", "@param self The object pointer # @retval string Generated file", "at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER", "bytes of certificate. 
# the 32 bit is the MonotonicCount,", "Generated file name # def GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData):", "# # Fill structure # Guid = self.ImageTypeId.split('-') Buffer =", "the accompanying materials # are licensed and made available under", "# def GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1: if self.FvName.upper() in", "AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # # Append file content", "int(self.HardwareInstance, 16) ) if AuthData: Buffer += pack('QIHH', AuthData[0], AuthData[1],", "= GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile =", "## AnyFile class for capsule data # # class CapsuleAnyFile", "int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16),", "class CapsuleFfs (CapsuleData): ## The constructor # # @param self", "FdBuffer = StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName", "self.Ffs = None self.FvName = None self.CapsuleName = None ##", "The object pointer def GenCapsuleSubItem(self): pass ## FFS class for", "uuid ## base class for capsule data # # class", "data # # @param self The object pointer # @retval", "PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS", "# def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return FfsFile ## FV", "FdBuffer.close() return FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ##", "file name # def GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData): '''Generate", "= None FdBuffer.close() return FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return", "# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS", "import SaveFileOnChange import uuid ## base class for capsule data", "## FD class for capsule data # # class CapsuleFd", "return FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile", "= None self.FileName = None ## generate AnyFile capsule data", "'0x00000002' if not self.ImageIndex: self.ImageIndex = '0x1' if not self.HardwareInstance:", "FdObj.GenFd() return FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ##", "the license may be found at # http://opensource.org/licenses/bsd-license.php # #", "import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from struct", "# # THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE", "to the structure # ImageFile = open(self.ImageFile, 'rb') Buffer +=", "The full text of the license may be found at", "= GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD class for capsule data", "@retval string Generated file name # def GenCapsuleSubItem(self): if self.FdName.find('.fd')", "# def __init__(self) : self.Ffs = None self.FileName = None", "include the full authenticated info size. From first bytes of", "to last bytes of certificate. # the 32 bit is", "Corporation. 
All rights reserved.<BR> # # This program and the", "# # @param self The object pointer def GenCapsuleSubItem(self): pass", "struct import pack import os from Common.Misc import SaveFileOnChange import", "= None self.ImageTypeId = None self.ImageIndex = None self.HardwareInstance =", "# class CapsuleFfs (CapsuleData): ## The constructor # # @param", "FvFile ## FD class for capsule data # # class", "VendorFileSize = os.path.getsize(self.VendorCodeFile) # # Fill structure # Guid =", "GenCapsuleSubItem(self): pass ## FFS class for capsule data # #", "32 VendorFileSize = 0 if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) #", "[] self.Certificate_Guid = None self.MonotonicCount = None self.Existed = False", "else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD class for", "GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName", "in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return FdFile", "self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) # # Fill structure # Guid", "else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile class for", "# @param self The object pointer def __init__(self): pass ##", "= os.path.getsize(self.ImageFile) if AuthData: # the ImageFileSize need include the", "FvObj.CapsuleName = None FdBuffer.close() return FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName)", "16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0, 0, 0,", "if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd()", "= None self.FdName = None self.CapsuleName = None ## generate", ") if AuthData: Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3])", "None self.FileName = None ## generate AnyFile capsule data #", "class for capsule data # # class CapsuleFv (CapsuleData): ##", "@retval string Generated file name # def GenCapsuleSubItem(self): if self.FvName.find('.fv')", "full authenticated info size. From first bytes of MonotonicCount to", "+= 32 VendorFileSize = 0 if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile)", "and CertType ImageFileSize += 32 VendorFileSize = 0 if self.VendorCodeFile:", "= self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16),", "info size. From first bytes of MonotonicCount to last bytes", "UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance;", "pointer # def __init__(self) : self.Ffs = None self.FvName =", "CapsuleAfile (CapsuleData): ## The constructor # # @param self The", "capsule # # Copyright (c) 2007-2017, Intel Corporation. 
All rights", "pack import os from Common.Misc import SaveFileOnChange import uuid ##", "self.Version = None self.ImageTypeId = None self.ImageIndex = None self.HardwareInstance", "name # def GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1: if self.FdName.upper()", "self.FileName class CapsulePayload(CapsuleData): '''Generate payload file, the header is defined", "DISTRIBUTED UNDER THE BSD LICENSE ON AN \"AS IS\" BASIS,", "wCertificateType and CertType ImageFileSize += 32 VendorFileSize = 0 if", "CapsulePayload(CapsuleData): '''Generate payload file, the header is defined below: #pragma", "None self.MonotonicCount = None self.Existed = False self.Buffer = None", "# # @param self The object pointer def __init__(self): pass", "size. From first bytes of MonotonicCount to last bytes of", "None self.ImageTypeId = None self.ImageIndex = None self.HardwareInstance = None", "may be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM", "Fill structure # Guid = self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16),", ": self.Ffs = None self.FileName = None ## generate Afile", "OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. #", "0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) ) if AuthData: Buffer +=", "int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16),", "CapsuleFd (CapsuleData): ## The constructor # # @param self The", "FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD class for capsule", "= open(self.ImageFile, 'rb') Buffer += ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile", "0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) ) if AuthData: Buffer", "## The constructor # # @param self The object pointer", "class CapsuleFv (CapsuleData): ## The constructor # # @param self", "## generate FV capsule data # # @param self The", "# # This program and the accompanying materials # are", "which accompanies this distribution. 
The full text of the license", "object pointer def GenCapsuleSubItem(self): pass ## FFS class for capsule", "be found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS", "file name # def GenCapsuleSubItem(self): return self.FileName ## Afile class", "for capsule data # # class CapsuleData: ## The constructor", "== -1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile", "FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile class for capsule", "= 0 if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) # # Fill", "int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16),", "@retval string Generated file name # def GenCapsuleSubItem(self): return self.FileName", "FfsFile ## FV class for capsule data # # class", "self.FileName = None ## generate AnyFile capsule data # #", "# # @param self The object pointer # @retval string", "= None self.CapsuleName = None ## generate FV capsule data", "UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize;", "@param self The object pointer def __init__(self): pass ## generate", "FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile class", "None ## generate Afile capsule data # # @param self", "class CapsuleFd (CapsuleData): ## The constructor # # @param self", "#pragma pack(1) typedef struct { UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8", "pointer # def __init__(self) : self.Ffs = None self.FdName =", "object pointer # @retval string Generated file name # def", "string Generated file name # def GenCapsuleSubItem(self): return self.FileName ##", "found at # http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED", "FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close()", "UNDER THE BSD LICENSE ON AN \"AS IS\" BASIS, #", "MonotonicCount, dwLength, wRevision, wCertificateType and CertType ImageFileSize += 32 VendorFileSize", "False self.Buffer = None def GenCapsuleSubItem(self, AuthData=[]): if not self.Version:", "int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:],", "# def GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1: if self.FdName.upper() in", "OR IMPLIED. # ## # Import Modules # import Ffs", "FD class for capsule data # # class CapsuleFd (CapsuleData):", "AuthData[1], AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # # Append file", "self The object pointer # @retval string Generated file name", "is defined below: #pragma pack(1) typedef struct { UINT32 Version;", "Generated file name # def GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1:", "Afile class for capsule data # # class CapsuleAfile (CapsuleData):", "__init__(self): pass ## generate capsule data # # @param self", "the header is defined below: #pragma pack(1) typedef struct {", "= None self.CapsuleName = None ## generate FD capsule data", "def __init__(self) : self.Ffs = None self.FdName = None self.CapsuleName", "2007-2017, Intel Corporation. 
All rights reserved.<BR> # # This program", "# @param self The object pointer # @retval string Generated", "= GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile class for capsule data", "base class for capsule data # # class CapsuleData: ##", "'0x1' if not self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize = os.path.getsize(self.ImageFile)", "GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD class for capsule data #", "# Import Modules # import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable", "import pack import os from Common.Misc import SaveFileOnChange import uuid", "16), int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8],", "= None self.ImageFile = [] self.VendorCodeFile = [] self.Certificate_Guid =", "# http://opensource.org/licenses/bsd-license.php # # THE PROGRAM IS DISTRIBUTED UNDER THE", "GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj", "= None def GenCapsuleSubItem(self, AuthData=[]): if not self.Version: self.Version =", "Append file content to the structure # ImageFile = open(self.ImageFile,", "= None self.Existed = False self.Buffer = None def GenCapsuleSubItem(self,", "IMPLIED. # ## # Import Modules # import Ffs from", "certificate. # the 32 bit is the MonotonicCount, dwLength, wRevision,", "of certificate. # the 32 bit is the MonotonicCount, dwLength,", "reserved.<BR> # # This program and the accompanying materials #", "generate FD capsule data # # @param self The object", "capsule data # # class CapsuleAfile (CapsuleData): ## The constructor", "def __init__(self) : self.Ffs = None self.FileName = None ##", "# # class CapsuleData: ## The constructor # # @param", "GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return FdFile else:", "EITHER EXPRESS OR IMPLIED. # ## # Import Modules #", "Copyright (c) 2007-2017, Intel Corporation. All rights reserved.<BR> # #", "in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName =", "= None self.HardwareInstance = None self.ImageFile = [] self.VendorCodeFile =", "## generate Afile capsule data # # @param self The", "{ UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32", "class CapsuleAfile (CapsuleData): ## The constructor # # @param self", "return FvFile ## FD class for capsule data # #", "self.ImageIndex = None self.HardwareInstance = None self.ImageFile = [] self.VendorCodeFile", "capsule data # # class CapsuleFv (CapsuleData): ## The constructor", "distribution. The full text of the license may be found", "FV capsule data # # @param self The object pointer", "AnyFile capsule data # # @param self The object pointer", "# @retval string Generated file name # def GenCapsuleSubItem(self): FfsFile", "int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16),", "for capsule data # # class CapsuleFv (CapsuleData): ## The", "# def __init__(self) : self.Ffs = None self.FvName = None", "authenticated info size. 
From first bytes of MonotonicCount to last", "None self.FvName = None ## generate FFS capsule data #", "structure # Guid = self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0],", "dwLength, wRevision, wCertificateType and CertType ImageFileSize += 32 VendorFileSize =", "self.FileName ## Afile class for capsule data # # class", "= [] self.VendorCodeFile = [] self.Certificate_Guid = None self.MonotonicCount =", "self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize = os.path.getsize(self.ImageFile) if AuthData: #", "16), 0, 0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) ) if", "# # class CapsuleFfs (CapsuleData): ## The constructor # #", "## generate FFS capsule data # # @param self The", "@retval string Generated file name # def GenCapsuleSubItem(self): FfsFile =", "Modules # import Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO", "class for capsule data # # class CapsuleAfile (CapsuleData): ##", "0, 0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) ) if AuthData:", "Common.Misc import SaveFileOnChange import uuid ## base class for capsule", "IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND,", "= None ## generate AnyFile capsule data # # @param", "# @retval string Generated file name # def GenCapsuleSubItem(self): if", "structure # ImageFile = open(self.ImageFile, 'rb') Buffer += ImageFile.read() ImageFile.close()", "self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close() return FvFile", "## base class for capsule data # # class CapsuleData:", "Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex; UINT8 reserved_bytes[3]; UINT32 UpdateImageSize; UINT32", "return FvFile else: FvFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FvName) return FvFile ## FD", "''' def __init__(self): self.UiName = None self.Version = None self.ImageTypeId", "# @param self The object pointer # def __init__(self) :", "@param self The object pointer def GenCapsuleSubItem(self): pass ## FFS", "terms and conditions of the BSD License # which accompanies", "import StringIO from struct import pack import os from Common.Misc", "'rb') Buffer += ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile,", "Generated file name # def GenCapsuleSubItem(self): return self.FileName ## Afile", "WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.", "int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16),", "# the ImageFileSize need include the full authenticated info size.", "FV class for capsule data # # class CapsuleFv (CapsuleData):", "None self.ImageIndex = None self.HardwareInstance = None self.ImageFile = []", "# # @param self The object pointer # def __init__(self)", "= self.Ffs.GenFfs() return FfsFile ## FV class for capsule data", "class CapsulePayload(CapsuleData): '''Generate payload file, the header is defined below:", "# Fill structure # Guid = self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ',", "AuthData: Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer +=", "__init__(self): self.UiName = None self.Version = None self.ImageTypeId = None", "pointer def __init__(self): pass ## generate capsule data # #", "if not self.Version: self.Version = '0x00000002' if not self.ImageIndex: self.ImageIndex", "are licensed and made available under the terms and 
conditions", "16), int(self.ImageIndex, 16), 0, 0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16)", "string Generated file name # def GenCapsuleSubItem(self): if self.FvName.find('.fv') ==", "ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb') Buffer +=", "self The object pointer def __init__(self): pass ## generate capsule", "UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER;", "= None ## generate FV capsule data # # @param", "class CapsuleAnyFile (CapsuleData): ## The constructor # # @param self", "16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0, 0, 0, ImageFileSize, VendorFileSize,", "= False self.Buffer = None def GenCapsuleSubItem(self, AuthData=[]): if not", "AnyFile class for capsule data # # class CapsuleAnyFile (CapsuleData):", "full text of the license may be found at #", "'''Generate payload file, the header is defined below: #pragma pack(1)", "+= uuid.UUID(AuthData[4]).get_bytes_le() # # Append file content to the structure", "16) ) if AuthData: Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2],", "None ## generate FFS capsule data # # @param self", "# class CapsuleAfile (CapsuleData): ## The constructor # # @param", "int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16),", "# def __init__(self) : self.Ffs = None self.FdName = None", "self.Ffs = None self.FdName = None self.CapsuleName = None ##", "self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read() VendorFile.close() self.Existed", "the 32 bit is the MonotonicCount, dwLength, wRevision, wCertificateType and", "os.path.getsize(self.VendorCodeFile) # # Fill structure # Guid = self.ImageTypeId.split('-') Buffer", "# class CapsuleFv (CapsuleData): ## The constructor # # @param", "self.CapsuleName = None ## generate FD capsule data # #", "import os from Common.Misc import SaveFileOnChange import uuid ## base", "self.Buffer = None def GenCapsuleSubItem(self, AuthData=[]): if not self.Version: self.Version", "if not self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize = os.path.getsize(self.ImageFile) if", "generate FV capsule data # # @param self The object", "@param self The object pointer # def __init__(self) : self.Ffs", "(CapsuleData): ## The constructor # # @param self The object", "# # class CapsuleAfile (CapsuleData): ## The constructor # #", "StringIO from struct import pack import os from Common.Misc import", "self.Version: self.Version = '0x00000002' if not self.ImageIndex: self.ImageIndex = '0x1'", "GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return FdFile ## AnyFile class for capsule data #", "self.FdName = None self.CapsuleName = None ## generate FD capsule", "FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile", "None self.FileName = None ## generate Afile capsule data #", "UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; '''", "v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self): self.UiName = None self.Version", "name # def GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1: if self.FvName.upper()", "16), int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10],", "# THE 
PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON", "UINT64 UpdateHardwareInstance; //Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def __init__(self):", "= None self.FileName = None ## generate Afile capsule data", "def GenCapsuleSubItem(self, AuthData=[]): if not self.Version: self.Version = '0x00000002' if", "return FdFile ## AnyFile class for capsule data # #", "def __init__(self): pass ## generate capsule data # # @param", "+= pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() #", "licensed and made available under the terms and conditions of", "32 bit is the MonotonicCount, dwLength, wRevision, wCertificateType and CertType", "Buffer += ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb')", "ImageFile = open(self.ImageFile, 'rb') Buffer += ImageFile.read() ImageFile.close() if self.VendorCodeFile:", "16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:],", "if self.FdName.find('.fd') == -1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj =", "None ## generate FD capsule data # # @param self", "import GenFdsGlobalVariable import StringIO from struct import pack import os", "pass ## FFS class for capsule data # # class", "SaveFileOnChange import uuid ## base class for capsule data #", "data # # class CapsuleAfile (CapsuleData): ## The constructor #", "= None self.ImageIndex = None self.HardwareInstance = None self.ImageFile =", "wRevision, wCertificateType and CertType ImageFileSize += 32 VendorFileSize = 0", "KIND, EITHER EXPRESS OR IMPLIED. # ## # Import Modules", "## generate FD capsule data # # @param self The", "is the MonotonicCount, dwLength, wRevision, wCertificateType and CertType ImageFileSize +=", "os from Common.Misc import SaveFileOnChange import uuid ## base class", "class for capsule data # # class CapsuleAnyFile (CapsuleData): ##", "pointer def GenCapsuleSubItem(self): pass ## FFS class for capsule data", "self The object pointer def GenCapsuleSubItem(self): pass ## FFS class", "FfsFile = self.Ffs.GenFfs() return FfsFile ## FV class for capsule", "MonotonicCount to last bytes of certificate. # the 32 bit", "## generate capsule data # # @param self The object", "None self.CapsuleName = None ## generate FD capsule data #", "def GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys():", "and made available under the terms and conditions of the", "and the accompanying materials # are licensed and made available", "capsule data # # class CapsuleData: ## The constructor #", "FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close() return FvFile else: FvFile =", "UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in v2 } EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER; ''' def", "of MonotonicCount to last bytes of certificate. 
# the 32", "int(self.ImageIndex, 16), 0, 0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance, 16) )", ": self.Ffs = None self.FvName = None self.CapsuleName = None", "Buffer += pack('QIHH', AuthData[0], AuthData[1], AuthData[2], AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le()", "class for capsule data # # class CapsuleData: ## The", "for capsule data # # class CapsuleFfs (CapsuleData): ## The", ": self.Ffs = None self.FdName = None self.CapsuleName = None", "@file # generate capsule # # Copyright (c) 2007-2017, Intel", "program and the accompanying materials # are licensed and made", "os.path.getsize(self.ImageFile) if AuthData: # the ImageFileSize need include the full", "-1: if self.FvName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FvDict.keys(): FvObj = GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer =", "below: #pragma pack(1) typedef struct { UINT32 Version; EFI_GUID UpdateImageTypeId;", "first bytes of MonotonicCount to last bytes of certificate. #", "Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16),", "pack(1) typedef struct { UINT32 Version; EFI_GUID UpdateImageTypeId; UINT8 UpdateImageIndex;", "CapsuleFfs (CapsuleData): ## The constructor # # @param self The", "## Afile class for capsule data # # class CapsuleAfile", "= pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16), int(Guid[1], 16), int(Guid[2], 16), int(Guid[3][-4:-2],", "if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read() VendorFile.close()", "and conditions of the BSD License # which accompanies this", "from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from struct import pack", "= os.path.getsize(self.VendorCodeFile) # # Fill structure # Guid = self.ImageTypeId.split('-')", "def __init__(self) : self.Ffs = None self.FvName = None ##", "return self.FileName ## Afile class for capsule data # #", "data # # @param self The object pointer def GenCapsuleSubItem(self):", "Afile capsule data # # @param self The object pointer", "need include the full authenticated info size. 
From first bytes", "WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR", "Ffs from GenFdsGlobalVariable import GenFdsGlobalVariable import StringIO from struct import", "The object pointer # def __init__(self) : self.Ffs = None", "self.Existed = False self.Buffer = None def GenCapsuleSubItem(self, AuthData=[]): if", "From first bytes of MonotonicCount to last bytes of certificate.", ": self.Ffs = None self.FvName = None ## generate FFS", "# ## # Import Modules # import Ffs from GenFdsGlobalVariable", "VendorFileSize, int(self.HardwareInstance, 16) ) if AuthData: Buffer += pack('QIHH', AuthData[0],", "class for capsule data # # class CapsuleFfs (CapsuleData): ##", "def GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData): '''Generate payload file, the", "None self.Version = None self.ImageTypeId = None self.ImageIndex = None", "# def GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData): '''Generate payload file,", "+= ImageFile.read() ImageFile.close() if self.VendorCodeFile: VendorFile = open(self.VendorCodeFile, 'rb') Buffer", "open(self.VendorCodeFile, 'rb') Buffer += VendorFile.read() VendorFile.close() self.Existed = True return", "16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2],", "REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED. # ##", "\"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY", "AuthData=[]): if not self.Version: self.Version = '0x00000002' if not self.ImageIndex:", "return FfsFile ## FV class for capsule data # #", "file name # def GenCapsuleSubItem(self): if self.FdName.find('.fd') == -1: if", "# # class CapsuleFd (CapsuleData): ## The constructor # #", "capsule data # # class CapsuleAnyFile (CapsuleData): ## The constructor", "= None self.FvName = None self.CapsuleName = None ## generate", "# @retval string Generated file name # def GenCapsuleSubItem(self): return", "All rights reserved.<BR> # # This program and the accompanying", "pass ## generate capsule data # # @param self The", "## FFS class for capsule data # # class CapsuleFfs", "16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex,", "Intel Corporation. 
All rights reserved.<BR> # # This program and", "= FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close() return FvFile else: FvFile", "accompanying materials # are licensed and made available under the", "self.UiName = None self.Version = None self.ImageTypeId = None self.ImageIndex", "the terms and conditions of the BSD License # which", "file name # def GenCapsuleSubItem(self): FfsFile = self.Ffs.GenFfs() return FfsFile", "self.FdName.find('.fd') == -1: if self.FdName.upper() in GenFdsGlobalVariable.FdfParser.Profile.FdDict.keys(): FdObj = GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper())", "self.VendorCodeFile = [] self.Certificate_Guid = None self.MonotonicCount = None self.Existed", "CertType ImageFileSize += 32 VendorFileSize = 0 if self.VendorCodeFile: VendorFileSize", "rights reserved.<BR> # # This program and the accompanying materials", "FD capsule data # # @param self The object pointer", "object pointer # def __init__(self) : self.Ffs = None self.FdName", "FdFile = FdObj.GenFd() return FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName) return", "generate Afile capsule data # # @param self The object", "ImageFileSize = os.path.getsize(self.ImageFile) if AuthData: # the ImageFileSize need include", "BSD License # which accompanies this distribution. The full text", "def __init__(self) : self.Ffs = None self.FvName = None self.CapsuleName", "None self.CapsuleName = None ## generate FV capsule data #", "= None self.MonotonicCount = None self.Existed = False self.Buffer =", "capsule data # # @param self The object pointer #", "StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None", "capsule data # # class CapsuleFd (CapsuleData): ## The constructor", "self.FvName = None ## generate FFS capsule data # #", "self.ImageFile = [] self.VendorCodeFile = [] self.Certificate_Guid = None self.MonotonicCount", "AuthData: # the ImageFileSize need include the full authenticated info", "## FV class for capsule data # # class CapsuleFv", "16), int(Guid[3][-4:-2], 16), int(Guid[3][-2:], 16), int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6],", "int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16), int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0, 0,", "= None self.Version = None self.ImageTypeId = None self.ImageIndex =", "the structure # ImageFile = open(self.ImageFile, 'rb') Buffer += ImageFile.read()", "= None ## generate Afile capsule data # # @param", "GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return FdFile else: FdFile = GenFdsGlobalVariable.ReplaceWorkspaceMacro(self.FdName)", "# class CapsuleFd (CapsuleData): ## The constructor # # @param", "The constructor # # @param self The object pointer #", "file, the header is defined below: #pragma pack(1) typedef struct", "ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR REPRESENTATIONS", "file name # def GenCapsuleSubItem(self): if self.FvName.find('.fv') == -1: if", "# Guid = self.ImageTypeId.split('-') Buffer = pack('=ILHHBBBBBBBBBBBBIIQ', int(self.Version,16), int(Guid[0], 16),", "The object pointer def __init__(self): pass ## generate capsule data", "# # Append file content to the structure # ImageFile", "int(Guid[4][-2:], 16), int(self.ImageIndex, 16), 0, 0, 0, ImageFileSize, VendorFileSize, int(self.HardwareInstance,", "name # def GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData): '''Generate payload", "= 
self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer) FvObj.CapsuleName = None FdBuffer.close() return", "AuthData[3]) Buffer += uuid.UUID(AuthData[4]).get_bytes_le() # # Append file content to", "data # # class CapsuleData: ## The constructor # #", "if self.VendorCodeFile: VendorFileSize = os.path.getsize(self.VendorCodeFile) # # Fill structure #", "class CapsuleData: ## The constructor # # @param self The", "(c) 2007-2017, Intel Corporation. All rights reserved.<BR> # # This", "= None self.FvName = None ## generate FFS capsule data", "UINT32 UpdateImageSize; UINT32 UpdateVendorCodeSize; UINT64 UpdateHardwareInstance; //Introduced in v2 }", "string Generated file name # def GenCapsuleSubItem(self): if self.FdName.find('.fd') ==", "LICENSE ON AN \"AS IS\" BASIS, # WITHOUT WARRANTIES OR", "# def GenCapsuleSubItem(self): return self.FileName ## Afile class for capsule", "data # # class CapsuleFfs (CapsuleData): ## The constructor #", "self.MonotonicCount = None self.Existed = False self.Buffer = None def", "GenCapsuleSubItem(self): return self.FileName ## Afile class for capsule data #", "None self.HardwareInstance = None self.ImageFile = [] self.VendorCodeFile = []", "GenCapsuleSubItem(self): return self.FileName class CapsulePayload(CapsuleData): '''Generate payload file, the header", "= GenFdsGlobalVariable.FdfParser.Profile.FdDict.get(self.FdName.upper()) FdFile = FdObj.GenFd() return FdFile else: FdFile =", "# # class CapsuleAnyFile (CapsuleData): ## The constructor # #", "object pointer # def __init__(self) : self.Ffs = None self.FvName", "GenFdsGlobalVariable.FdfParser.Profile.FvDict.get(self.FvName.upper()) FdBuffer = StringIO.StringIO('') FvObj.CapsuleName = self.CapsuleName FvFile = FvObj.AddToBuffer(FdBuffer)", "int(Guid[4][-12:-10], 16), int(Guid[4][-10:-8], 16), int(Guid[4][-8:-6], 16), int(Guid[4][-6:-4], 16), int(Guid[4][-4:-2], 16),", "data # # class CapsuleFv (CapsuleData): ## The constructor #", "string Generated file name # def GenCapsuleSubItem(self): return self.FileName class", "of the BSD License # which accompanies this distribution. The", "__init__(self) : self.Ffs = None self.FileName = None ## generate", "GenFdsGlobalVariable import StringIO from struct import pack import os from", "def GenCapsuleSubItem(self): pass ## FFS class for capsule data #", "pointer # @retval string Generated file name # def GenCapsuleSubItem(self):", "# ImageFile = open(self.ImageFile, 'rb') Buffer += ImageFile.read() ImageFile.close() if", "self.ImageIndex = '0x1' if not self.HardwareInstance: self.HardwareInstance = '0x0' ImageFileSize" ]
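
# A minimal sketch of how the '=ILHHBBBBBBBBBBBBIIQ' format string maps onto
# EFI_FIRMWARE_MANAGEMENT_CAPSULE_IMAGE_HEADER. The GUID and the field values
# below are hypothetical, chosen only to illustrate the 40-byte layout.
from struct import pack, calcsize

FMT = '=ILHHBBBBBBBBBBBBIIQ'
# 4 (Version) + 16 (GUID) + 1 (ImageIndex) + 3 (reserved) + 4 + 4 + 8 = 40
assert calcsize(FMT) == 40

g = '12345678-1234-5678-1234-567812345678'.split('-')  # hypothetical ImageTypeId
fields = (2,                                            # Version
          int(g[0], 16), int(g[1], 16), int(g[2], 16),  # GUID data1..data3
          int(g[3][:2], 16), int(g[3][2:], 16))         # clock_seq bytes
fields += tuple(int(g[4][i:i + 2], 16) for i in range(0, 12, 2))  # node bytes
fields += (1,                                           # UpdateImageIndex
           0, 0, 0,                                     # reserved_bytes[3]
           1024,                                        # UpdateImageSize
           0,                                           # UpdateVendorCodeSize
           0)                                           # UpdateHardwareInstance
header = pack(FMT, *fields)
assert len(header) == 40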
[ "size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig, ax = plt.subplots() ax.plot(size,", "[] time = [] with open(\"pi_linear.txt\") as file: for line", "plt import numpy as np # Read data size =", "line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig, ax = plt.subplots()", "Plot data fig, ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes',", "Read data size = [] time = [] with open(\"pi_linear.txt\")", "numpy as np # Read data size = [] time", "with open(\"pi_linear.txt\") as file: for line in file.readlines(): x, y", "import matplotlib.pyplot as plt import numpy as np # Read", "import numpy as np # Read data size = []", "as np # Read data size = [] time =", "= [] with open(\"pi_linear.txt\") as file: for line in file.readlines():", "[] with open(\"pi_linear.txt\") as file: for line in file.readlines(): x,", "open(\"pi_linear.txt\") as file: for line in file.readlines(): x, y =", "ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi", "time = [] with open(\"pi_linear.txt\") as file: for line in", "= plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi linear')", "x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig,", "data size = [] time = [] with open(\"pi_linear.txt\") as", "ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi linear') #ax.grid() fig.savefig(\"pi_linear.png\")", "plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi linear') #ax.grid()", "as file: for line in file.readlines(): x, y = line.split(',')", "size = [] time = [] with open(\"pi_linear.txt\") as file:", "matplotlib.pyplot as plt import numpy as np # Read data", "= [] time = [] with open(\"pi_linear.txt\") as file: for", "data fig, ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time", "as plt import numpy as np # Read data size", "np # Read data size = [] time = []", "# Read data size = [] time = [] with", "= line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig, ax =", "file: for line in file.readlines(): x, y = line.split(',') size.append(int(x.strip()))", "time) ax.set(xlabel='Num. processes', ylabel='Time (s)', title='Pi linear') #ax.grid() fig.savefig(\"pi_linear.png\") plt.show()", "time.append(float(y.strip())) # Plot data fig, ax = plt.subplots() ax.plot(size, time)", "in file.readlines(): x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot", "y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data fig, ax", "line in file.readlines(): x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) #", "for line in file.readlines(): x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip()))", "file.readlines(): x, y = line.split(',') size.append(int(x.strip())) time.append(float(y.strip())) # Plot data", "fig, ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num. processes', ylabel='Time (s)',", "# Plot data fig, ax = plt.subplots() ax.plot(size, time) ax.set(xlabel='Num." ]
[ "each partition :type partition_result: List[PartitionResult] \"\"\" self.topic = topic self.partition_result", "election results, or an empty array if the requester did", "partition_id self.error_code = error_code self.error_message = error_message class ReplicaElectionResult: topic:", "name :type topic: str :param partition_result: The results for each", ":type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms = throttle_time_ms self.replica_election_results = replica_election_results", "The result error, or zero if there was no error.", "def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The", "typing import ClassVar, List, Optional from ...constants import ApiKey, ErrorCode", "str partition_result: List[PartitionResult] def __init__(self, topic: str, partition_result: List[PartitionResult]): \"\"\"", "error_message: Optional[str] def __init__(self, partition_id: int, error_code: ErrorCode, error_message: Optional[str]):", "self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key:", "List, Optional from ...constants import ApiKey, ErrorCode from ..base import", "error_message class ReplicaElectionResult: topic: str partition_result: List[PartitionResult] def __init__(self, topic:", "class PartitionResult: partition_id: int error_code: ErrorCode error_message: Optional[str] def __init__(self,", "ErrorCode :param error_message: The result message, or null if there", "which the request was throttled due to a quota violation,", "for all partitions. :type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms = throttle_time_ms", "__init__(self, topic: str, partition_result: List[PartitionResult]): \"\"\" :param topic: The topic", "a quota violation, or zero if the request did not", ":param partition_id: The partition id :type partition_id: int :param error_code:", "error_message: The result message, or null if there was no", "results for each partition :type partition_result: List[PartitionResult] \"\"\" self.topic =", "quota. :type throttle_time_ms: int :param replica_election_results: The election results, or", ":param throttle_time_ms: The duration in milliseconds for which the request", "api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]):", "The election results, or an empty array if the requester", "self.error_message = error_message class ReplicaElectionResult: topic: str partition_result: List[PartitionResult] def", "did not have permission and the request asks for all", "error_code self.error_message = error_message class ReplicaElectionResult: topic: str partition_result: List[PartitionResult]", "partition_id: int, error_code: ErrorCode, error_message: Optional[str]): \"\"\" :param partition_id: The", "ErrorCode error_message: Optional[str] def __init__(self, partition_id: int, error_code: ErrorCode, error_message:", "there was no error. 
:type error_message: Optional[str] \"\"\" self.partition_id =", ":type throttle_time_ms: int :param replica_election_results: The election results, or an", "ReplicaElectionResult: topic: str partition_result: List[PartitionResult] def __init__(self, topic: str, partition_result:", "int :param replica_election_results: The election results, or an empty array", "def __init__(self, topic: str, partition_result: List[PartitionResult]): \"\"\" :param topic: The", ":param topic: The topic name :type topic: str :param partition_result:", "not have permission and the request asks for all partitions.", "topic: The topic name :type topic: str :param partition_result: The", ":param error_message: The result message, or null if there was", "the request did not violate any quota. :type throttle_time_ms: int", "\"\"\" self.topic = topic self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms:", "permission and the request asks for all partitions. :type replica_election_results:", "= partition_id self.error_code = error_code self.error_message = error_message class ReplicaElectionResult:", "asks for all partitions. :type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms =", "List[PartitionResult] def __init__(self, topic: str, partition_result: List[PartitionResult]): \"\"\" :param topic:", "import ClassVar, List, Optional from ...constants import ApiKey, ErrorCode from", "Optional[str] def __init__(self, partition_id: int, error_code: ErrorCode, error_message: Optional[str]): \"\"\"", "topic self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult]", "<gh_stars>0 from typing import ClassVar, List, Optional from ...constants import", "partition_result: List[PartitionResult]): \"\"\" :param topic: The topic name :type topic:", "if the requester did not have permission and the request", "error. :type error_code: ErrorCode :param error_message: The result message, or", "topic name :type topic: str :param partition_result: The results for", "partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] =", "import ResponseData class PartitionResult: partition_id: int error_code: ErrorCode error_message: Optional[str]", "__init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The duration", "ApiKey, ErrorCode from ..base import ResponseData class PartitionResult: partition_id: int", "or zero if the request did not violate any quota.", "Optional[str] \"\"\" self.partition_id = partition_id self.error_code = error_code self.error_message =", "ErrorCode from ..base import ResponseData class PartitionResult: partition_id: int error_code:", "ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def", "int error_code: ErrorCode error_message: Optional[str] def __init__(self, partition_id: int, error_code:", "results, or an empty array if the requester did not", "List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int, replica_election_results:", "no error. 
:type error_message: Optional[str] \"\"\" self.partition_id = partition_id self.error_code", "throttled due to a quota violation, or zero if the", "self.error_code = error_code self.error_message = error_message class ReplicaElectionResult: topic: str", "if the request did not violate any quota. :type throttle_time_ms:", "Optional from ...constants import ApiKey, ErrorCode from ..base import ResponseData", "did not violate any quota. :type throttle_time_ms: int :param replica_election_results:", ":type topic: str :param partition_result: The results for each partition", "\"\"\" :param topic: The topic name :type topic: str :param", "due to a quota violation, or zero if the request", "id :type partition_id: int :param error_code: The result error, or", "error_message: Optional[str]): \"\"\" :param partition_id: The partition id :type partition_id:", "if there was no error. :type error_code: ErrorCode :param error_message:", "result message, or null if there was no error. :type", "\"\"\" :param partition_id: The partition id :type partition_id: int :param", "request was throttled due to a quota violation, or zero", "ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms:", "__init__(self, partition_id: int, error_code: ErrorCode, error_message: Optional[str]): \"\"\" :param partition_id:", "The results for each partition :type partition_result: List[PartitionResult] \"\"\" self.topic", "Optional[str]): \"\"\" :param partition_id: The partition id :type partition_id: int", "partition_result: The results for each partition :type partition_result: List[PartitionResult] \"\"\"", "int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The duration in milliseconds", "not violate any quota. :type throttle_time_ms: int :param replica_election_results: The", "import ApiKey, ErrorCode from ..base import ResponseData class PartitionResult: partition_id:", "from ..base import ResponseData class PartitionResult: partition_id: int error_code: ErrorCode", "for each partition :type partition_result: List[PartitionResult] \"\"\" self.topic = topic", ":param error_code: The result error, or zero if there was", "requester did not have permission and the request asks for", "from typing import ClassVar, List, Optional from ...constants import ApiKey,", "or null if there was no error. :type error_message: Optional[str]", "partition id :type partition_id: int :param error_code: The result error,", "error, or zero if there was no error. :type error_code:", "ResponseData class PartitionResult: partition_id: int error_code: ErrorCode error_message: Optional[str] def", "the request was throttled due to a quota violation, or", "The result message, or null if there was no error.", "\"\"\" :param throttle_time_ms: The duration in milliseconds for which the", "request asks for all partitions. 
:type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms", "List[PartitionResult]): \"\"\" :param topic: The topic name :type topic: str", "replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The duration in milliseconds for", "topic: str :param partition_result: The results for each partition :type", "self.partition_id = partition_id self.error_code = error_code self.error_message = error_message class", "ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\"", "List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The duration in milliseconds for which", "violate any quota. :type throttle_time_ms: int :param replica_election_results: The election", "ErrorCode, error_message: Optional[str]): \"\"\" :param partition_id: The partition id :type", "quota violation, or zero if the request did not violate", "The partition id :type partition_id: int :param error_code: The result", "in milliseconds for which the request was throttled due to", "the request asks for all partitions. :type replica_election_results: List[ReplicaElectionResult] \"\"\"", "replica_election_results: The election results, or an empty array if the", "zero if the request did not violate any quota. :type", "str :param partition_result: The results for each partition :type partition_result:", "= error_code self.error_message = error_message class ReplicaElectionResult: topic: str partition_result:", "all partitions. :type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms = throttle_time_ms self.replica_election_results", "and the request asks for all partitions. :type replica_election_results: List[ReplicaElectionResult]", "if there was no error. :type error_message: Optional[str] \"\"\" self.partition_id", "no error. :type error_code: ErrorCode :param error_message: The result message,", "or an empty array if the requester did not have", "= error_message class ReplicaElectionResult: topic: str partition_result: List[PartitionResult] def __init__(self,", "error_code: The result error, or zero if there was no", "partition_id: int :param error_code: The result error, or zero if", "error. :type error_message: Optional[str] \"\"\" self.partition_id = partition_id self.error_code =", "class ReplicaElectionResult: topic: str partition_result: List[PartitionResult] def __init__(self, topic: str,", "str, partition_result: List[PartitionResult]): \"\"\" :param topic: The topic name :type", ":type partition_result: List[PartitionResult] \"\"\" self.topic = topic self.partition_result = partition_result", "from ...constants import ApiKey, ErrorCode from ..base import ResponseData class", "message, or null if there was no error. :type error_message:", "throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self,", "violation, or zero if the request did not violate any", "topic: str, partition_result: List[PartitionResult]): \"\"\" :param topic: The topic name", "any quota. :type throttle_time_ms: int :param replica_election_results: The election results,", "was no error. 
:type error_message: Optional[str] \"\"\" self.partition_id = partition_id", "def __init__(self, partition_id: int, error_code: ErrorCode, error_message: Optional[str]): \"\"\" :param", "..base import ResponseData class PartitionResult: partition_id: int error_code: ErrorCode error_message:", "self.topic = topic self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int", "ClassVar, List, Optional from ...constants import ApiKey, ErrorCode from ..base", "int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms:", "partition_result: List[PartitionResult] def __init__(self, topic: str, partition_result: List[PartitionResult]): \"\"\" :param", "error_code: ErrorCode, error_message: Optional[str]): \"\"\" :param partition_id: The partition id", "empty array if the requester did not have permission and", "topic: str partition_result: List[PartitionResult] def __init__(self, topic: str, partition_result: List[PartitionResult]):", "for which the request was throttled due to a quota", "null if there was no error. :type error_message: Optional[str] \"\"\"", "was throttled due to a quota violation, or zero if", "have permission and the request asks for all partitions. :type", "throttle_time_ms: int :param replica_election_results: The election results, or an empty", "result error, or zero if there was no error. :type", "partition_id: int error_code: ErrorCode error_message: Optional[str] def __init__(self, partition_id: int,", "error_code: ErrorCode error_message: Optional[str] def __init__(self, partition_id: int, error_code: ErrorCode,", "partitions. :type replica_election_results: List[ReplicaElectionResult] \"\"\" self.throttle_time_ms = throttle_time_ms self.replica_election_results =", "= ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param", "zero if there was no error. :type error_code: ErrorCode :param", "or zero if there was no error. :type error_code: ErrorCode", "to a quota violation, or zero if the request did", "throttle_time_ms: int, replica_election_results: List[ReplicaElectionResult]): \"\"\" :param throttle_time_ms: The duration in", "milliseconds for which the request was throttled due to a", "an empty array if the requester did not have permission", ":type error_code: ErrorCode :param error_message: The result message, or null", "there was no error. 
:type error_code: ErrorCode :param error_message: The", "...constants import ApiKey, ErrorCode from ..base import ResponseData class PartitionResult:", "partition_result: List[PartitionResult] \"\"\" self.topic = topic self.partition_result = partition_result class", ":type partition_id: int :param error_code: The result error, or zero", "int :param error_code: The result error, or zero if there", "duration in milliseconds for which the request was throttled due", "array if the requester did not have permission and the", ":type error_message: Optional[str] \"\"\" self.partition_id = partition_id self.error_code = error_code", "= topic self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results:", ":param replica_election_results: The election results, or an empty array if", "\"\"\" self.partition_id = partition_id self.error_code = error_code self.error_message = error_message", "PartitionResult: partition_id: int error_code: ErrorCode error_message: Optional[str] def __init__(self, partition_id:", "replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS def __init__(self, throttle_time_ms: int,", "int, error_code: ErrorCode, error_message: Optional[str]): \"\"\" :param partition_id: The partition", "error_code: ErrorCode :param error_message: The result message, or null if", "List[PartitionResult] \"\"\" self.topic = topic self.partition_result = partition_result class ElectPreferredLeadersResponseData(ResponseData):", "partition :type partition_result: List[PartitionResult] \"\"\" self.topic = topic self.partition_result =", "error_message: Optional[str] \"\"\" self.partition_id = partition_id self.error_code = error_code self.error_message", "The topic name :type topic: str :param partition_result: The results", "partition_id: The partition id :type partition_id: int :param error_code: The", ":param partition_result: The results for each partition :type partition_result: List[PartitionResult]", "was no error. :type error_code: ErrorCode :param error_message: The result", "throttle_time_ms: The duration in milliseconds for which the request was", "= partition_result class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey]", "The duration in milliseconds for which the request was throttled", "the requester did not have permission and the request asks", "class ElectPreferredLeadersResponseData(ResponseData): throttle_time_ms: int replica_election_results: List[ReplicaElectionResult] api_key: ClassVar[ApiKey] = ApiKey.ELECT_PREFERRED_LEADERS", "request did not violate any quota. :type throttle_time_ms: int :param" ]
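
# ---------------------------------------------------------------------------
# Illustrative sketch, not part of the original module: constructing a
# response by hand. Topic and partition values are made up, and the "no
# error" member name of the ErrorCode enum is an assumption, so the example
# is left commented out.
# ---------------------------------------------------------------------------
# response = ElectPreferredLeadersResponseData(
#     throttle_time_ms=0,
#     replica_election_results=[
#         ReplicaElectionResult(
#             topic="my-topic",
#             partition_result=[PartitionResult(0, ErrorCode.NONE, None)],  # ErrorCode.NONE is assumed
#         )
#     ],
# )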
[ "test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer =", "py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop()", "pytest import py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): # The following", "not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\",", "tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert \"bar\"", "test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert", "test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with", "py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get()", "# doesn't, mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer =", "def test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy()", "\"bar\"]): assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def", "py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True)", "assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer()", "assert log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure", "log: assert not py_zipkin.storage.Stack([]).get() assert log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage():", "assert [\"a\", \"b\", \"c\"] == the_copy._storage assert [\"a\", \"b\", \"d\"]", "in a custom storage. 
assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer", "1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure this still works", "assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get() def", "py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"])", "py_zipkin.storage.Stack([]).get() assert log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make", "py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" ==", "== py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\"", "test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not", "as log: assert not py_zipkin.storage.Stack([]).get() assert log.call_count == 1 def", "def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert", "stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\", \"c\"]", "\"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs():", "test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\"", "= py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert \"bar\" ==", "following tests all expect _thread_local.zipkin_attrs to exist: if it #", "py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\",", "[\"a\", \"b\", \"c\"] == the_copy._storage assert [\"a\", \"b\", \"d\"] ==", "# Let's make sure this still works if we don't", "mock import pytest import py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): #", "not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\",", "import py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): # The following tests", "assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\",", "def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]):", "\"_storage\", [\"foo\", \"bar\"]): 
assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" ==", "if it # doesn't, mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs():", "\"foo\" == stack.get() stack.push(\"bar\") assert \"bar\" == stack.get() def test_stack_copy():", "import pytest import py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): # The", "[\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" ==", "py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log: assert", "py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get() assert not", "\"b\", \"c\"] == the_copy._storage assert [\"a\", \"b\", \"d\"] == stack._storage", "def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert", "assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs():", "exist: if it # doesn't, mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def", "custom storage. assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer()", "\"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []):", "mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def", "@pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): # The following tests all expect", "def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer()", "\"bar\" == py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\"", "tests all expect _thread_local.zipkin_attrs to exist: if it # doesn't,", "log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure this", "tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop()", "py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get() stack.push(\"bar\") assert \"bar\" == stack.get()", "= py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get()", "mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log: assert not py_zipkin.storage.Stack([]).get() assert log.call_count", "py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def 
test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\",", "def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log: assert not", "with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list():", "test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get() stack.push(\"bar\") assert", "\"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop()", "= py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" == context_stack.pop() assert \"foo\" ==", "assert not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log,", "stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get() stack.push(\"bar\") assert \"bar\"", "def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer", "autospec=True) as log: assert not py_zipkin.storage.Stack([]).get() assert log.call_count == 1", "with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert", "to exist: if it # doesn't, mock.patch will fail. 
py_zipkin.storage.ThreadLocalStack().get()", "not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log:", "== py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" ==", "assert \"foo\" == stack.get() stack.push(\"bar\") assert \"bar\" == stack.get() def", "test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\"", "\"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack,", "[\"foo\", \"bar\"]): assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get()", "py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\")", "not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\",", "make sure this still works if we don't pass in", "== py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack =", "if we don't pass in a custom storage. 
assert not", "test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" == context_stack.pop() assert", "py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]):", "[]): assert not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with", "create_zipkin_attrs(): # The following tests all expect _thread_local.zipkin_attrs to exist:", "assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack,", "assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs():", "stack.push(\"bar\") assert \"bar\" == stack.get() def test_stack_copy(): stack = py_zipkin.storage.Stack()", "== 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure this still", "mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert", "def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure this still works if", "assert \"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with", "context_stack.pop() assert \"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer()", "stack.get() stack.push(\"bar\") assert \"bar\" == stack.get() def test_stack_copy(): stack =", "stack.push(\"d\") assert [\"a\", \"b\", \"c\"] == the_copy._storage assert [\"a\", \"b\",", "py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests():", "def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert", "with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log: assert not py_zipkin.storage.Stack([]).get() assert", "== py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack,", "\"bar\" == context_stack.pop() assert \"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer", "Let's make sure this still works if we don't pass", "[]): assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def", "= stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\", \"c\"] == the_copy._storage", "py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == 
py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack =", "will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack,", "== stack.get() def test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy", "_thread_local.zipkin_attrs to exist: if it # doesn't, mock.patch will fail.", "py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def", "import mock import pytest import py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs():", "def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" == context_stack.pop()", "test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as log: assert not py_zipkin.storage.Stack([]).get()", "all expect _thread_local.zipkin_attrs to exist: if it # doesn't, mock.patch", "py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\",", "== py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def", "\"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" ==", "== context_stack.pop() assert \"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer =", "not py_zipkin.storage.Stack([]).get() assert log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's", "assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert", "stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\")", "The following tests all expect _thread_local.zipkin_attrs to exist: if it", "mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\"", "sure this still works if we don't pass in a", "not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer", "assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"])", "assert not py_zipkin.storage.Stack([]).get() assert log.call_count == 1 def test_storage_stack_still_works_if_you_dont_pass_in_storage(): #", "this still works if we don't pass in a custom", "def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with 
mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert", "assert \"bar\" == stack.get() def test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\")", "assert \"bar\" == context_stack.pop() assert \"foo\" == context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list():", "a custom storage. assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer =", "expect _thread_local.zipkin_attrs to exist: if it # doesn't, mock.patch will", "with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert", "mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not", "= py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get() assert", "\"bar\" == stack.get() def test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\")", "assert not py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_none_if_no_zipkin_attrs(): with mock.patch.object(py_zipkin.storage.log, \"warning\", autospec=True) as", "py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" == context_stack.pop() assert \"foo\" == context_stack.get()", "doesn't, mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer()", "tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]): assert \"foo\" ==", "== context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\",", "scope=\"module\") def create_zipkin_attrs(): # The following tests all expect _thread_local.zipkin_attrs", "it # doesn't, mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer", "py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []):", "with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert", "= py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get() stack.push(\"bar\") assert \"bar\" ==", "stack.get() def test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy =", "def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack = py_zipkin.storage.Stack([\"foo\"]) assert \"foo\" == stack.get() stack.push(\"bar\")", "pass in a custom storage. 
assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list():", "py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\",", "== py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list(): stack", "== stack.get() stack.push(\"bar\") assert \"bar\" == stack.get() def test_stack_copy(): stack", "the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\", \"c\"] ==", "with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get() assert not py_zipkin.storage.ThreadLocalStack().get()", "py_zipkin.storage @pytest.fixture(autouse=True, scope=\"module\") def create_zipkin_attrs(): # The following tests all", "we don't pass in a custom storage. assert not py_zipkin.storage.Stack().get()", "\"_storage\", [\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\"", "= py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop() def", "\"warning\", autospec=True) as log: assert not py_zipkin.storage.Stack([]).get() assert log.call_count ==", "the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\", \"c\"] == the_copy._storage assert [\"a\",", "context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert \"bar\" == context_stack.pop() assert \"foo\"", "\"bar\"]) assert \"bar\" == context_stack.pop() assert \"foo\" == context_stack.get() def", "= py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert", "\"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_with_context_returns_the_last_of_the_list(): assert \"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get()", "py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests():", "py_zipkin.storage.ThreadLocalStack().pop() def test_pop_zipkin_attrs_with_context_does_nothing_if_no_requests(): assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer =", "context_stack.get() def test_push_zipkin_attrs_adds_new_zipkin_attrs_to_list(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\"]):", "test_stack_copy(): stack = py_zipkin.storage.Stack() stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\")", "test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not", "# The following tests all expect _thread_local.zipkin_attrs to exist: if", "\"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): context_stack", "\"foo\" == py_zipkin.storage.ThreadLocalStack().get() def test_pop_zipkin_attrs_with_context_removes_the_last_zipkin_attrs(): 
context_stack = py_zipkin.storage.Stack([\"foo\", \"bar\"]) assert", "works if we don't pass in a custom storage. assert", "[\"foo\"]): assert \"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get()", "fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\",", "def create_zipkin_attrs(): # The following tests all expect _thread_local.zipkin_attrs to", "storage. assert not py_zipkin.storage.Stack().get() def test_get_zipkin_attrs_returns_the_last_of_the_list(): tracer = py_zipkin.storage.get_default_tracer() with", "\"foo\" == py_zipkin.storage.ThreadLocalStack().get() py_zipkin.storage.ThreadLocalStack().push(\"bar\") assert \"bar\" == py_zipkin.storage.ThreadLocalStack().get() def test_push_zipkin_attrs_with_context_adds_new_zipkin_attrs_to_list():", "stack.push(\"a\") stack.push(\"b\") the_copy = stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\",", "mock.patch.object(tracer._context_stack, \"_storage\", [\"foo\", \"bar\"]): assert \"bar\" == py_zipkin.storage.ThreadLocalStack().pop() assert \"foo\"", "tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack, \"_storage\", []): assert not py_zipkin.storage.ThreadLocalStack().get()", "don't pass in a custom storage. assert not py_zipkin.storage.Stack().get() def", "mock.patch will fail. py_zipkin.storage.ThreadLocalStack().get() def test_get_zipkin_attrs_returns_none_if_no_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with", "stack.copy() the_copy.push(\"c\") stack.push(\"d\") assert [\"a\", \"b\", \"c\"] == the_copy._storage assert", "test_storage_stack_still_works_if_you_dont_pass_in_storage(): # Let's make sure this still works if we", "still works if we don't pass in a custom storage.", "\"foo\" == py_zipkin.storage.Stack([\"bar\", \"foo\"]).get() def test_pop_zipkin_attrs_does_nothing_if_no_requests(): tracer = py_zipkin.storage.get_default_tracer() with", "assert not py_zipkin.storage.Stack([]).pop() def test_pop_zipkin_attrs_removes_the_last_zipkin_attrs(): tracer = py_zipkin.storage.get_default_tracer() with mock.patch.object(tracer._context_stack," ]
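
# ---------------------------------------------------------------------------
# Minimal sketch, not part of the original tests: the Stack behaviour the
# tests above assert, gathered in one place. get() peeks at the top element
# without removing it, pop() removes it, and copy() snapshots the storage.
# ---------------------------------------------------------------------------
def demo_stack_semantics():
    stack = py_zipkin.storage.Stack()
    stack.push("attrs-1")
    stack.push("attrs-2")
    assert stack.get() == "attrs-2"  # peek: top element stays on the stack
    assert stack.pop() == "attrs-2"  # pop: top element is removed
    assert stack.get() == "attrs-1"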
[ "store_supported=True, status_supported=True ) def _handler(self, request, response): variable = request.inputs['variable'][0].data", "plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) def _handler(self, request,", "__init__(self): inputs = [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)),", "Process, LiteralInput, ComplexInput, ComplexOutput from pywps import Format import logging", "ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return output class SimplePlot(Process): def", "abstract='Please enter the variable name to be plotted, example: air'),", "name to be plotted, example: air'), ] outputs = [", "10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines() plt.colorbar()", "plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines()", "as ccrs from netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def", "abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please enter the variable", "data_type='string', default='air', abstract='Please enter the variable name to be plotted,", "# https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt import cartopy.crs as", "super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', title='Simple Plot', abstract='Returns a nice and", "be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt", "server ... 
must be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import", "output class SimplePlot(Process): def __init__(self): inputs = [ ComplexInput('dataset', 'Dataset',", "default='air', abstract='Please enter the variable name to be plotted, example:", "plt.close() return output class SimplePlot(Process): def __init__(self): inputs = [", "ds = Dataset(resource) values = ds.variables[variable] fig = plt.figure(figsize=(20, 10))", "the variable name to be plotted, example: air'), ] outputs", "inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) def _handler(self, request, response): variable", "LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file = output response.update_status(\"simple_plot done\", 100)", "= Dataset(resource) values = ds.variables[variable] fig = plt.figure(figsize=(20, 10)) ax", "netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, output=None):", "SimplePlot(Process): def __init__(self): inputs = [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS,", "values = ds.variables[variable] fig = plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree())", "from pywps import Process, LiteralInput, ComplexInput, ComplexOutput from pywps import", "output or 'plot.png' ds = Dataset(resource) values = ds.variables[variable] fig", "as plt import cartopy.crs as ccrs from netCDF4 import Dataset", "supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please enter", "# no X11 server ... must be run first #", "first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt import cartopy.crs", "Format import logging LOGGER = logging.getLogger('PYWPS') import matplotlib # no", "air'), ] outputs = [ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True),", "as_reference=True), ] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', title='Simple Plot', abstract='Returns a", "... 
must be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab", "response): variable = request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced", "= 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, output=None): output = output or", "] outputs = [ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ]", "ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output)", "class SimplePlot(Process): def __init__(self): inputs = [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')],", "import logging LOGGER = logging.getLogger('PYWPS') import matplotlib # no X11", "resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file = output response.update_status(\"simple_plot", "'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', title='Simple", "inputs = [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable',", "status_supported=True ) def _handler(self, request, response): variable = request.inputs['variable'][0].data output", "plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return output", "ComplexInput, ComplexOutput from pywps import Format import logging LOGGER =", "matplotlib.pylab as plt import cartopy.crs as ccrs from netCDF4 import", "default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please enter the", "= ds.variables[variable] fig = plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0,", "nice and simple plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True )", "Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', title='Simple Plot',", "'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, output=None): output = output or 'plot.png'", "identifier='simple_plot', title='Simple Plot', abstract='Returns a nice and simple plot.', version='1.0',", "matplotlib # no X11 server ... 
must be run first", "variable=None, output=None): output = output or 'plot.png' ds = Dataset(resource)", "variable=variable) LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file = output response.update_status(\"simple_plot done\",", "supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', title='Simple Plot', abstract='Returns", "LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please enter the variable name to", "cartopy.crs as ccrs from netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc'", "= request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\",", "fig.savefig(output) plt.close() return output class SimplePlot(Process): def __init__(self): inputs =", "self).__init__( self._handler, identifier='simple_plot', title='Simple Plot', abstract='Returns a nice and simple", "'Variable', data_type='string', default='air', abstract='Please enter the variable name to be", "logging.getLogger('PYWPS') import matplotlib # no X11 server ... must be", "_handler(self, request, response): variable = request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file,", "and simple plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) def", "pywps import Process, LiteralInput, ComplexInput, ComplexOutput from pywps import Format", "{0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please enter the variable name", "= plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close()", "def __init__(self): inputs = [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example:", ":]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return output class SimplePlot(Process):", "= output or 'plot.png' ds = Dataset(resource) values = ds.variables[variable]", "output=None): output = output or 'plot.png' ds = Dataset(resource) values", "simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file = output", "ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return output class SimplePlot(Process): def __init__(self):", "variable name to be plotted, example: air'), ] outputs =", "outputs = [ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot,", "self._handler, identifier='simple_plot', title='Simple Plot', abstract='Returns a nice and simple plot.',", "pywps import Format import logging LOGGER = logging.getLogger('PYWPS') import matplotlib", "%s\", output) response.outputs['output'].file = output response.update_status(\"simple_plot done\", 100) return response", "import matplotlib.pylab as plt import cartopy.crs as ccrs from netCDF4", "example: air'), ] outputs = [ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')],", ") def _handler(self, request, response): variable = request.inputs['variable'][0].data output =", "] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot', 
title='Simple Plot', abstract='Returns a nice", "be plotted, example: air'), ] outputs = [ ComplexOutput('output', 'Simple", "abstract='Returns a nice and simple plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True,", "= simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file =", "import Format import logging LOGGER = logging.getLogger('PYWPS') import matplotlib #", "plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return", "plt import cartopy.crs as ccrs from netCDF4 import Dataset AIR_DS", "from pywps import Format import logging LOGGER = logging.getLogger('PYWPS') import", "request, response): variable = request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable)", "a nice and simple plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True", "from netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None,", "'plot.png' ds = Dataset(resource) values = ds.variables[variable] fig = plt.figure(figsize=(20,", "logging LOGGER = logging.getLogger('PYWPS') import matplotlib # no X11 server", "ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__( self._handler, identifier='simple_plot',", "import cartopy.crs as ccrs from netCDF4 import Dataset AIR_DS =", "Dataset(resource) values = ds.variables[variable] fig = plt.figure(figsize=(20, 10)) ax =", "Plot', abstract='Returns a nice and simple plot.', version='1.0', inputs=inputs, outputs=outputs,", "[ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__( self._handler,", "simple plot.', version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) def _handler(self,", "output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\", output) response.outputs['output'].file", "output = output or 'plot.png' ds = Dataset(resource) values =", "import Process, LiteralInput, ComplexInput, ComplexOutput from pywps import Format import", "return output class SimplePlot(Process): def __init__(self): inputs = [ ComplexInput('dataset',", "import matplotlib # no X11 server ... must be run", "= [ ComplexOutput('output', 'Simple Plot', supported_formats=[Format('image/png')], as_reference=True), ] super(SimplePlot, self).__init__(", "no X11 server ... 
must be run first # https://github.com/matplotlib/matplotlib/issues/3466/", "plotted, example: air'), ] outputs = [ ComplexOutput('output', 'Simple Plot',", "enter the variable name to be plotted, example: air'), ]", "'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air', abstract='Please", "matplotlib.use('Agg') import matplotlib.pylab as plt import cartopy.crs as ccrs from", "LiteralInput, ComplexInput, ComplexOutput from pywps import Format import logging LOGGER", "fig = plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :])", "Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, output=None): output =", "must be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as", "simple_plot(resource, variable=None, output=None): output = output or 'plot.png' ds =", "= logging.getLogger('PYWPS') import matplotlib # no X11 server ... must", "title='Simple Plot', abstract='Returns a nice and simple plot.', version='1.0', inputs=inputs,", "ccrs from netCDF4 import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource,", "outputs=outputs, store_supported=True, status_supported=True ) def _handler(self, request, response): variable =", "def _handler(self, request, response): variable = request.inputs['variable'][0].data output = simple_plot(", "LOGGER = logging.getLogger('PYWPS') import matplotlib # no X11 server ...", "ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string', default='air',", "variable = request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output:", "output: %s\", output) response.outputs['output'].file = output response.update_status(\"simple_plot done\", 100) return", "https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt import cartopy.crs as ccrs", "or 'plot.png' ds = Dataset(resource) values = ds.variables[variable] fig =", "def simple_plot(resource, variable=None, output=None): output = output or 'plot.png' ds", "run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg') import matplotlib.pylab as plt import", "= [ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable',", ":, :]) ax.stock_img() ax.coastlines() plt.colorbar() fig.savefig(output) plt.close() return output class", "import Dataset AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, output=None): output", "request.inputs['variable'][0].data output = simple_plot( resource=request.inputs['dataset'][0].file, variable=variable) LOGGER.info(\"produced output: %s\", output)", "AIR_DS = 'https://www.esrl.noaa.gov/psd/thredds/fileServer/Datasets/ncep.reanalysis.derived/surface/air.mon.ltm.nc' def simple_plot(resource, variable=None, 
output=None): output = output", "= plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :, :]) ax.stock_img()", "ComplexOutput from pywps import Format import logging LOGGER = logging.getLogger('PYWPS')", "plt.colorbar() fig.savefig(output) plt.close() return output class SimplePlot(Process): def __init__(self): inputs", "ds.variables[variable] fig = plt.figure(figsize=(20, 10)) ax = plt.axes(projection=ccrs.PlateCarree()) plt.contourf(values[0, :,", "[ ComplexInput('dataset', 'Dataset', supported_formats=[Format('application/x-netcdf')], default=AIR_DS, abstract='Example: {0}'.format(AIR_DS)), LiteralInput('variable', 'Variable', data_type='string',", "to be plotted, example: air'), ] outputs = [ ComplexOutput('output',", "X11 server ... must be run first # https://github.com/matplotlib/matplotlib/issues/3466/ matplotlib.use('Agg')", "version='1.0', inputs=inputs, outputs=outputs, store_supported=True, status_supported=True ) def _handler(self, request, response):" ]
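# Usage sketch: render the example dataset once without going through a WPS
# request. This is a minimal check, assuming network access to fetch AIR_DS;
# the local file name 'air.mon.ltm.nc' is arbitrary.
if __name__ == '__main__':
    import urllib.request
    local_nc, _ = urllib.request.urlretrieve(AIR_DS, 'air.mon.ltm.nc')
    print(simple_plot(resource=local_nc, variable='air'))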
# -*- coding: utf-8 -*-
import json
import os.path
import random
import re

from flask import Flask, send_from_directory
from flask import request, abort
from flaskrun.flaskrun import flask_run
import datab.social_database as db

app = Flask(__name__)

# Regular expression to only accept certain files
fileChecker = re.compile(r"(.*\.js|.*\.html|.*\.png|.*\.css|.*\.map)$")

numberOfAnswers = 4
random.seed(7)


def root_dir():  # pragma: no cover
    return os.path.abspath(os.path.dirname(__file__))


@app.route('/')
def root():
    return index("index2.html")


@app.route('/<path:filename>')
def index(filename):
    # Serve only whitelisted static file types; anything else is forbidden.
    if fileChecker.match(filename):
        return send_from_directory(os.path.join(root_dir(), 'static'), filename)
    abort(403)


@app.route('/register')
def register():
    # Obtain the e-mail address from the query string.
    email = request.args.get('email')
    print(email)
    if email is None:
        return json.dumps({})
    id_user = db.register_or_get_email(email)
    return json.dumps({"id": id_user})


@app.route('/join_room')
def join_room():
    room_id = request.args.get('room_id')
    email = request.args.get('email')
    user_id = db.register_or_get_email(email)
    db.exec_query("REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)",
                  [room_id, user_id])
    return json.dumps({"id": user_id})


@app.route('/answered_room')
def answered_room():
    room_id = request.args.get('room_id')
    user_id = request.args.get('user_id')
    values = db.exec_query("SELECT a.id "
                           "FROM answer a INNER JOIN question q ON (a.question_id = q.id) "
                           "WHERE q.room_id = %s AND a.user_id = %s",
                           [room_id, user_id])
    return json.dumps({"answered": len(values) > 0})


@app.route('/get_user_id')
def get_user_id():
    email = request.args.get('email')
    id_user = db.register_or_get_email(email)
    return json.dumps({"id": id_user})


@app.route('/create_room')
def create_room():
    user_id = request.args.get('user_id')
    room_id = db.exec_query("INSERT INTO room (creator) VALUES (%s)", [user_id])
    return json.dumps({"id": room_id})


@app.route('/get_rooms')
def get_rooms():
    user_id = request.args.get('user_id')
    values = db.exec_query("SELECT r.id, r.status FROM room r WHERE r.creator=%s", [user_id])
    response = []
    for val in values:
        response.append({"id": val[0], "status": val[1]})
    return json.dumps(response)


@app.route('/fill_room', methods=['POST'])
def fill_room():
    json_data = request.get_json()
    if json_data is None:
        return json.dumps({"error": "no JSON found"})
    else:
        room_id = json_data["room_id"]
        questions = json_data["question"]
        for q in questions:
            db.exec_query("INSERT INTO question (room_id, question) VALUES (%s, %s)",
                          [room_id, q])
    return json.dumps({"info": "Data received"})


@app.route('/open_room')
def open_room():
    room_id = request.args.get('room_id')
    print(room_id)
    db.exec_query("UPDATE room r SET r.status='started' WHERE r.id = %s", [room_id])
    return json.dumps({"info": "The room has been opened successfully", "status": "started"})


@app.route('/close_room')
def close_room():
    room_id = request.args.get('room_id')
    db.exec_query("UPDATE room r SET r.status='closed' WHERE r.id = %s", [room_id])
    return json.dumps({"info": "The room has been closed successfully", "status": "closed"})


@app.route('/finish_room')
def finish_room():
    room_id = request.args.get('room_id')
    db.exec_query("UPDATE room r SET r.status='finished' WHERE r.id = %s", [room_id])
    # draft: SELECT id, COUNT(a.id), COUNT(a.id) FROM Room r INNER JOIN ...
    # Ranking: count correctly answered quiz questions per member of the room.
    values = db.exec_query("SELECT u.email , COUNT(qq.id) "
                           "FROM quiz_question qq "
                           "INNER JOIN users u ON (qq.asked_user_id = u.id) "
                           "INNER JOIN room_members rm ON (u.id = rm.user_id) "
                           "WHERE qq.correct_answer_id = qq.answered_id AND rm.room_id = %s "
                           "GROUP BY u.email "
                           "ORDER BY COUNT(qq.id) DESC", [room_id])
    ranking = []
    for row in values:
        ranking.append({"email": row[0], "correct": row[1]})
    return json.dumps({"ranking": ranking})


@app.route('/room_status')
def status_room():
    room_id = request.args.get('room_id')
    # SELECT status FROM Room WHERE id = 1
    values = db.exec_query("SELECT status FROM room WHERE id = %s", [room_id])
    return json.dumps({
        "status": values[0][0]
    })


@app.route('/get_room_questions')
def get_room_question():
    room_id = request.args.get('room_id')
    values = db.exec_query("SELECT id, question FROM question WHERE room_id = %s", [room_id])
    response = []
    for val in values:
        response.append({"id": val[0], "text": val[1]})
    return json.dumps({"questions": response})


@app.route('/post_room_answers', methods=['POST'])
def post_room_answers():
    json_data = request.get_json()
    if json_data is None:
        return json.dumps({"error": "no JSON found"}), 404
    user_id = json_data["user_id"]
    values = []
    for a in json_data["answers"]:
        values.append((a["id"], user_id, a["text"]))
    print(values[len(values) - 1])
    db.exec_many_query("INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)",
                       values)
    return json.dumps({"info": "Data received"})


@app.route('/get_quiz_question')
def get_question():
    room_id = int(request.args.get('room_id'))
    user_id = int(request.args.get('user_id'))
    possible_questions = db.get_non_answered_questions(room_id, user_id)
    possible_users_to_ask = db.get_non_answered_people(room_id, user_id)
    question_id = []
    asked_about_id = []
    # Prefer a question this user has not answered yet; otherwise fall back
    # to any question in the room.
    if len(possible_questions) > 0:
        question_id = random.sample(possible_questions, 1)
    else:
        possible_questions = db.get_all_questions(room_id)
        if len(possible_questions) > 0:
            question_id = random.sample(possible_questions, 1)
    # Same strategy for the person the question is about.
    if len(possible_users_to_ask) > 0:
        asked_about_id = random.sample(possible_users_to_ask, 1)
    else:
        possible_users_to_ask = db.get_all_different_people(room_id, user_id)
        # Guard on the refetched user list (rooms can have no other members).
        if len(possible_users_to_ask) > 0:
            asked_about_id = random.sample(possible_users_to_ask, 1)
    if len(question_id) > 0 and 0 < len(asked_about_id):
        quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0])
        other_users = db.get_all_different_people(room_id, asked_about_id[0])
        random.shuffle(other_users)
        answers = []
        (answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0])
        db.exec_query("UPDATE quiz_question SET correct_answer_id=%s WHERE id = %s",
                      [answer_id, quiz_question_id])
        answers.append((answer_id, text_id))
        # Pad the answer set with up to numberOfAnswers - 1 decoys from other users.
        if min(numberOfAnswers - 1, len(other_users)) > 0:
            for i in range(min(numberOfAnswers - 1, len(other_users))):
                (answer_id, text_id) = db.get_answer(question_id[0], other_users[i])
                answers.append((answer_id, text_id))
        # Without this shuffle the first answer would always be the correct one.
        random.shuffle(answers)
        answer_json = []
        for (answer_id, text_id) in answers:
            answer_json.append({"id": answer_id, "text": text_id})
        print(quiz_question_id)
        # SELECT 'question' FROM 'Question' WHERE 'id' = 3
        value = db.exec_query("SELECT id "
                              "FROM quiz_question "
                              "WHERE asked_user_id = %s AND about_user_id = %s AND question_id = %s",
                              [user_id, asked_about_id[0], question_id[0]])
        quiz_question_id = value[0][0]
        value = db.exec_query("SELECT q.question "
                              "FROM question q "
                              "WHERE q.id = %s", [question_id[0]])
        question_text = value[0][0]
        value = db.exec_query("SELECT u.email "
                              "FROM users u "
                              "WHERE u.id=%s", [asked_about_id[0]])
        user_name = value[0][0]
        question_text = "What did %s answer to '%s' ?" % (user_name, question_text)
        return json.dumps({
            "id": quiz_question_id,
            "question": question_text,
            "answers": answer_json
        })
    else:
        return json.dumps({"error": "No available questions for this user in this room"})


@app.route('/post_quiz_answer')
def post_answer():
    quiz_question_id = request.args.get('quiz_question_id')
    quiz_answer_id = request.args.get('quiz_answer_id')
    db.exec_query("UPDATE quiz_question SET answered_id = %s WHERE id = %s",
                  [quiz_answer_id, quiz_question_id])
    value = db.exec_query("SELECT qq.answered_id, qq.correct_answer_id, qq.question_id "
                          "FROM quiz_question qq "
                          "WHERE qq.id = %s", [quiz_question_id])
    # Check the query result before indexing into it.
    if value is None:
        return json.dumps({"error": "Internal server error"})
    answered_id = value[0][0]
    correct_answer_id = value[0][1]
    question_id = value[0][2]
    value = db.exec_query("SELECT a.answer FROM answer a WHERE a.id = %s ", [correct_answer_id])
    if len(value) > 0:
        text = value[0][0]
    else:
        text = "something went wrong"
    return json.dumps({
        "correct": answered_id == correct_answer_id,
        "question": question_id,
        "correct_answer": {"id": correct_answer_id, "text": text}
    })


if __name__ == '__main__':
    flask_run(app)
datab.social_database as db app", "JSON found\"}), 404 user_id = json_data[\"user_id\"] values = [] for", "> 0}) @app.route('/get_user_id') def get_user_id(): email = request.args.get('email') id_user =", "return json.dumps({\"id\": room_id}) @app.route('/get_rooms') def get_rooms(): user_id = request.args.get('user_id') values", "return json.dumps({\"ranking\": ranking}) @app.route('/room_status') def status_room(): room_id = request.args.get('room_id') #", "quiz_question_id = value[0][0] value = db.exec_query(\"SELECT q.question \" \"FROM question", "db.exec_query(\"SELECT qq.answered_id, qq.correct_answer_id, qq.question_id \" \"FROM quiz_question qq \" \"WHERE", "ON (u.id = rm.user_id) \" \"WHERE qq.correct_answer_id = qq.answered_id AND", "random.shuffle(other_users) answers = [] (answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0]) db.exec_query(\"UPDATE", "response = [] for val in values: response.append({\"id\": val[0], \"status\":", "@app.route('/join_room') def join_room(): room_id = request.args.get('room_id') email = request.args.get('email') user_id", "if min(numberOfAnswers - 1, len(other_users)) > 0: for i in", "> 0: question_id = random.sample(possible_questions, 1) if len(possible_users_to_ask) > 0:", "methods=['POST']) def post_room_answers(): json_data = request.get_json() if json_data is None:", "= int(request.args.get('user_id')) possible_questions = db.get_non_answered_questions(room_id, user_id) possible_users_to_ask = db.get_non_answered_people(room_id, user_id)", "db.get_all_different_people(room_id, asked_about_id[0]) random.shuffle(other_users) answers = [] (answer_id, text_id) = db.get_answer(question_id[0],", "%s answer to '%s' ?\" % (user_name, question_text) return json.dumps({", "json_data[\"answers\"]: values.append((a[\"id\"], user_id, a[\"text\"])) print(values[len(values) - 1]) db.exec_many_query(\"INSERT INTO answer", "user_id) if len(possible_questions) > 0: asked_about_id = random.sample(possible_users_to_ask, 1) if", "question q \" \"WHERE a.question_id = q.id AND q.room_id =", "return json.dumps(response) @app.route('/fill_room', methods=['POST']) def fill_room(): json_data = request.get_json() if", "json_data[\"question\"] for q in questions: db.exec_query(\"INSERT INTO question (room_id, question)", "has been opened successfully\", \"status\": \"started\"}) @app.route('/close_room') def close_room(): room_id", "%s\", [room_id]) return json.dumps({\"info\": \"The room has been closed successfully\",", "response}) @app.route('/post_room_answers', methods=['POST']) def post_room_answers(): json_data = request.get_json() if json_data", "min(numberOfAnswers - 1, len(other_users)) > 0: for i in range(min(numberOfAnswers", "db.get_all_questions(room_id) if len(possible_questions) > 0: question_id = random.sample(possible_questions, 1) if", "room_id = request.args.get('room_id') # SELECT status FROM Room WHERE id", "json.dumps({\"id\": id_user}) @app.route('/create_room') def create_room(): user_id = request.args.get('user_id') room_id =", "return json.dumps({ \"correct\": answered_id == correct_answer_id, \"question\": question_id, \"correct_answer\": {\"id\":", "'question' FROM 'Question' WHERE 'id' = 3 value = db.exec_query(\"SELECT", "db.exec_query(\"SELECT r.id, r.status FROM room r WHERE r.creator=%s\", [user_id]) response", "[answer_id, quiz_question_id]) answers.append((answer_id, text_id)) if min(numberOfAnswers - 1, len(other_users)) >", "[] (answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0]) 
db.exec_query(\"UPDATE quiz_question SET correct_answer_id=%s", "= [] for a in json_data[\"answers\"]: values.append((a[\"id\"], user_id, a[\"text\"])) print(values[len(values)", "user_id, a[\"text\"])) print(values[len(values) - 1]) db.exec_many_query(\"INSERT INTO answer (question_id, user_id,", "= %s WHERE id = %s\", [quiz_answer_id, quiz_question_id]) value =", "value[0][0] value = db.exec_query(\"SELECT q.question \" \"FROM question q \"", "json_data[\"room_id\"] questions = json_data[\"question\"] for q in questions: db.exec_query(\"INSERT INTO", "db.exec_query(\"SELECT id, question FROM question WHERE room_id = %s\", [room_id])", "been closed successfully\", \"status\": \"closed\"}) @app.route('/finish_room') def finish_room(): room_id =", "values = db.exec_query(\"SELECT id, question FROM question WHERE room_id =", "json.dumps({\"error\": \"no JSON found\"}), 404 user_id = json_data[\"user_id\"] values =", "flask import request, abort from flaskrun.flaskrun import flask_run import datab.social_database", "1) else: possible_questions = db.get_all_questions(room_id) if len(possible_questions) > 0: question_id", "r.id = %s\", [room_id]) return json.dumps({\"info\": \"The room has been", "len(asked_about_id): quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0]) other_users = db.get_all_different_people(room_id, asked_about_id[0])", "db.exec_query(\"UPDATE room r SET r.status='started' WHERE r.id = %s\", [room_id])", "obtain the mail email = request.args.get('email') print(email) if email is", "\"Internal server error\"}) return json.dumps({ \"correct\": answered_id == correct_answer_id, \"question\":", "(answer_id, text_id) in answers: answer_json.append({\"id\": answer_id, \"text\": text_id}) print(quiz_question_id) #", "user_id]) return json.dumps({\"answered\": len(values) > 0}) @app.route('/get_user_id') def get_user_id(): email", "user_id) VALUES (%s,%s)\", [room_id, user_id]) return json.dumps({\"id\": user_id}) @app.route('/answered_room') def", "text_id) = db.get_answer(question_id[0], other_users[i]) answers.append((answer_id, text_id)) # if commented the", "@app.route('/<path:filename>') def index(filename): if fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(), 'static'), filename) abort(403)", "room_id = %s\", [room_id]) response = [] for val in", "a WHERE a.id = %s \", [correct_answer_id]) if len(value) >", "return json.dumps({}) id_user = db.register_or_get_email(email) return json.dumps({\"id\": id_user}) @app.route('/join_room') def", "= [] (answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0]) db.exec_query(\"UPDATE quiz_question SET", "answers: answer_json.append({\"id\": answer_id, \"text\": text_id}) print(quiz_question_id) # SELECT 'question' FROM", "print(room_id) db.exec_query(\"UPDATE room r SET r.status='started' WHERE r.id = %s\",", "answer will be the correct one random.shuffle(answers) answer_json = []", "= db.exec_query(\"SELECT r.id, r.status FROM room r WHERE r.creator=%s\", [user_id])", "\"closed\"}) @app.route('/finish_room') def finish_room(): room_id = request.args.get('room_id') db.exec_query(\"UPDATE room r", "qq.answered_id AND rm.room_id = %s \" \"GROUP BY u.email \"", "expression to only accept certain files fileChecker = re.compile(r\"(.*\\.js|.*\\.html|.*\\.png|.*\\.css|.*\\.map)$\") numberOfAnswers", "get_rooms(): user_id = request.args.get('user_id') values = db.exec_query(\"SELECT r.id, r.status FROM", "\"Data received\"}) @app.route('/open_room') def 
open_room(): room_id = request.args.get('room_id') print(room_id) db.exec_query(\"UPDATE", "from flask import request, abort from flaskrun.flaskrun import flask_run import", "room_id = request.args.get('room_id') db.exec_query(\"UPDATE room r SET r.status='closed' WHERE r.id", "- 1, len(other_users))): (answer_id, text_id) = db.get_answer(question_id[0], other_users[i]) answers.append((answer_id, text_id))", "\", [correct_answer_id]) if len(value) > 0: text = value[0][0] else:", "(%s)\", [user_id]) return json.dumps({\"id\": room_id}) @app.route('/get_rooms') def get_rooms(): user_id =", "(answer_id, text_id) = db.get_answer(question_id[0], other_users[i]) answers.append((answer_id, text_id)) # if commented", "json_data is None: return json.dumps({\"error\": \"no JSON found\"}), 404 user_id", "\"FROM users u \" \"WHERE u.id=%s\", [asked_about_id[0]]) user_name = value[0][0]", "qq.correct_answer_id, qq.question_id \" \"FROM quiz_question qq \" \"WHERE qq.id =", "\"question\": question_text, \"answers\": answer_json }) else: return json.dumps({\"error\": \"Not available", "[room_id]) return json.dumps({ \"status\": values[0][0] }) @app.route('/get_room_questions') def get_room_question(): room_id", "'id' = 3 value = db.exec_query(\"SELECT id \" \"FROM quiz_question", "(question_id, user_id, answer) VALUES(%s,%s,%s)\", values) return json.dumps({\"info\": \"Data received\"}) @app.route('/get_quiz_question')", "values: response.append({\"id\": val[0], \"text\": val[1]}) return json.dumps({\"questions\": response}) @app.route('/post_room_answers', methods=['POST'])", "\"status\": val[1]}) return json.dumps(response) @app.route('/fill_room', methods=['POST']) def fill_room(): json_data =", "question) VALUES (%s, %s)\", [room_id, q]) return json.dumps({\"info\": \"Data received\"})", "\"question\": question_id, \"correct_answer\": {\"id\": correct_answer_id, \"text\": text} }) if __name__", "INTO room_members (room_id, user_id) VALUES (%s,%s)\", [room_id, user_id]) return json.dumps({\"id\":", "> 0: question_id = random.sample(possible_questions, 1) else: possible_questions = db.get_all_questions(room_id)", "import re from flask import Flask, send_from_directory from flask import", "- 1]) db.exec_many_query(\"INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)\", values)", "r SET r.status='finished' WHERE r.id = %s\", [room_id]) # for", "= %s\", [room_id]) return json.dumps({\"info\": \"The room has been opened", "\"Data received\"}) @app.route('/get_quiz_question') def get_question(): room_id = int(request.args.get('room_id')) user_id =", "if len(question_id) > 0 and 0 < len(asked_about_id): quiz_question_id =", "[question_id[0]]) question_text = value[0][0] value = db.exec_query(\"SELECT u.email \" \"FROM", "available questions for this user in this room\"}) @app.route('/post_quiz_answer') def", "random.shuffle(answers) answer_json = [] for (answer_id, text_id) in answers: answer_json.append({\"id\":", "in range(min(numberOfAnswers - 1, len(other_users))): (answer_id, text_id) = db.get_answer(question_id[0], other_users[i])", "[quiz_question_id]) answered_id = value[0][0] correct_answer_id = value[0][1] question_id = value[0][2]", "json.dumps({\"id\": room_id}) @app.route('/get_rooms') def get_rooms(): user_id = request.args.get('user_id') values =", "id = %s\", [room_id]) return json.dumps({ \"status\": values[0][0] }) @app.route('/get_room_questions')", "quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0]) other_users = 
db.get_all_different_people(room_id, asked_about_id[0]) random.shuffle(other_users)", "q \" \"WHERE a.question_id = q.id AND q.room_id = %s", "if fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(), 'static'), filename) abort(403) @app.route('/register') def register():", "get_room_question(): room_id = request.args.get('room_id') values = db.exec_query(\"SELECT id, question FROM", "\"ORDER BY COUNT(qq.id) DESC\", [room_id]) ranking = [] for row", "\"no JSON found\"}), 404 user_id = json_data[\"user_id\"] values = []", "= db.register_or_get_email(email) db.exec_query(\"REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)\", [room_id,", "room r SET r.status='finished' WHERE r.id = %s\", [room_id]) #", "}) @app.route('/get_room_questions') def get_room_question(): room_id = request.args.get('room_id') values = db.exec_query(\"SELECT", "val[0], \"text\": val[1]}) return json.dumps({\"questions\": response}) @app.route('/post_room_answers', methods=['POST']) def post_room_answers():", "= db.exec_query(\"SELECT q.question \" \"FROM question q \" \"WHERE q.id", "request.get_json() if json_data is None: return json.dumps({\"error\": \"no JSON found\"})", "r.status='closed' WHERE r.id = %s\", [room_id]) return json.dumps({\"info\": \"The room", "first answer will be the correct one random.shuffle(answers) answer_json =", "print(email) if email is None: return json.dumps({}) id_user = db.register_or_get_email(email)", "db.exec_query(\"SELECT id \" \"FROM quiz_question \" \"WHERE asked_user_id = %s", "= request.args.get('quiz_question_id') quiz_answer_id = request.args.get('quiz_answer_id') db.exec_query(\"UPDATE quiz_question SET answered_id =", "\" \"WHERE asked_user_id = %s AND about_user_id = %s AND", "q]) return json.dumps({\"info\": \"Data received\"}) @app.route('/open_room') def open_room(): room_id =", "AND q.room_id = %s AND a.user_id= %s\", [room_id, user_id]) return", "INNER JOIN question q \" \"WHERE a.question_id = q.id AND", "[] if len(possible_questions) > 0: question_id = random.sample(possible_questions, 1) else:", "r.id, r.status FROM room r WHERE r.creator=%s\", [user_id]) response =", "= db.exec_query(\"SELECT qq.answered_id, qq.correct_answer_id, qq.question_id \" \"FROM quiz_question qq \"", "re from flask import Flask, send_from_directory from flask import request,", "def post_answer(): quiz_question_id = request.args.get('quiz_question_id') quiz_answer_id = request.args.get('quiz_answer_id') db.exec_query(\"UPDATE quiz_question", "answered_id = value[0][0] correct_answer_id = value[0][1] question_id = value[0][2] value", "random import re from flask import Flask, send_from_directory from flask", "SET r.status='finished' WHERE r.id = %s\", [room_id]) # for #", "to only accept certain files fileChecker = re.compile(r\"(.*\\.js|.*\\.html|.*\\.png|.*\\.css|.*\\.map)$\") numberOfAnswers =", "FROM 'Question' WHERE 'id' = 3 value = db.exec_query(\"SELECT id", "text = value[0][0] else: text = \"something when wrong\" if", "print(values[len(values) - 1]) db.exec_many_query(\"INSERT INTO answer (question_id, user_id, answer) VALUES(%s,%s,%s)\",", "= 3 value = db.exec_query(\"SELECT id \" \"FROM quiz_question \"", "= %s\", [user_id, asked_about_id[0], question_id[0]]) quiz_question_id = value[0][0] value =", "random.sample(possible_questions, 1) if len(possible_users_to_ask) > 0: asked_about_id = random.sample(possible_users_to_ask, 1)", "import os.path import random import re from flask import Flask,", "INTO question (room_id, question) VALUES (%s, 
%s)\", [room_id, q]) return", "asked_about_id = random.sample(possible_users_to_ask, 1) if len(question_id) > 0 and 0", "= request.args.get('email') id_user = db.register_or_get_email(email) return json.dumps({\"id\": id_user}) @app.route('/create_room') def", "return json.dumps({\"info\": \"Data received\"}) @app.route('/open_room') def open_room(): room_id = request.args.get('room_id')", "SET r.status='started' WHERE r.id = %s\", [room_id]) return json.dumps({\"info\": \"The", "= qq.answered_id AND rm.room_id = %s \" \"GROUP BY u.email", "SET correct_answer_id=%s WHERE id = %s\", [answer_id, quiz_question_id]) answers.append((answer_id, text_id))", "= %s AND question_id = %s\", [user_id, asked_about_id[0], question_id[0]]) quiz_question_id", "\" \"ORDER BY COUNT(qq.id) DESC\", [room_id]) ranking = [] for", "= value[0][2] value = db.exec_query(\"SELECT a.answer FROM answer a WHERE", "\"correct_answer\": {\"id\": correct_answer_id, \"text\": text} }) if __name__ == '__main__':", "= %s \" \"GROUP BY u.email \" \"ORDER BY COUNT(qq.id)", "\" \"GROUP BY u.email \" \"ORDER BY COUNT(qq.id) DESC\", [room_id])", "\" \"FROM quiz_question qq \" \"INNER JOIN users u ON", "# To obtain the mail email = request.args.get('email') print(email) if", "return json.dumps({\"id\": id_user}) @app.route('/join_room') def join_room(): room_id = request.args.get('room_id') email", "app = Flask(__name__) # Regular expression to only accept certain", "\"What did %s answer to '%s' ?\" % (user_name, question_text)", "1, len(other_users)) > 0: for i in range(min(numberOfAnswers - 1,", "response.append({\"id\": val[0], \"status\": val[1]}) return json.dumps(response) @app.route('/fill_room', methods=['POST']) def fill_room():", "wrong\" if value is None: return json.dumps({\"error\": \"Internal server error\"})", "WHERE r.creator=%s\", [user_id]) response = [] for val in values:", "send_from_directory from flask import request, abort from flaskrun.flaskrun import flask_run", "status FROM Room WHERE id = 1 values = db.exec_query(\"SELECT", "if len(possible_questions) > 0: asked_about_id = random.sample(possible_users_to_ask, 1) if len(question_id)", "row in values: ranking.append({\"email\": row[0], \"correct\": row[1]}) return json.dumps({\"ranking\": ranking})", "ON (qq.asked_user_id = u.id) \" \"INNER JOIN room_members rm ON", "JOIN values = db.exec_query(\"SELECT u.email , COUNT(qq.id) \" \"FROM quiz_question", "= \"What did %s answer to '%s' ?\" % (user_name,", "= rm.user_id) \" \"WHERE qq.correct_answer_id = qq.answered_id AND rm.room_id =", "request.args.get('email') print(email) if email is None: return json.dumps({}) id_user =", "COUNT(a.id) FROM Room r INNER JOIN values = db.exec_query(\"SELECT u.email", "for (answer_id, text_id) in answers: answer_json.append({\"id\": answer_id, \"text\": text_id}) print(quiz_question_id)", "1) else: possible_users_to_ask = db.get_all_different_people(room_id, user_id) if len(possible_questions) > 0:", "import datab.social_database as db app = Flask(__name__) # Regular expression", "db.exec_query(\"SELECT u.email , COUNT(qq.id) \" \"FROM quiz_question qq \" \"INNER", "qq.id = %s\", [quiz_question_id]) answered_id = value[0][0] correct_answer_id = value[0][1]", "= [] for row in values: ranking.append({\"email\": row[0], \"correct\": row[1]})", "\" \"WHERE u.id=%s\", [asked_about_id[0]]) user_name = value[0][0] question_text = \"What", "re.compile(r\"(.*\\.js|.*\\.html|.*\\.png|.*\\.css|.*\\.map)$\") numberOfAnswers = 4 random.seed(7) def root_dir(): # pragma: no", "import json 
import os.path import random import re from flask", "answer to '%s' ?\" % (user_name, question_text) return json.dumps({ \"id\":", "DESC\", [room_id]) ranking = [] for row in values: ranking.append({\"email\":", "for val in values: response.append({\"id\": val[0], \"status\": val[1]}) return json.dumps(response)", "quiz_question_id = request.args.get('quiz_question_id') quiz_answer_id = request.args.get('quiz_answer_id') db.exec_query(\"UPDATE quiz_question SET answered_id", "int(request.args.get('user_id')) possible_questions = db.get_non_answered_questions(room_id, user_id) possible_users_to_ask = db.get_non_answered_people(room_id, user_id) question_id", "u \" \"WHERE u.id=%s\", [asked_about_id[0]]) user_name = value[0][0] question_text =", "files fileChecker = re.compile(r\"(.*\\.js|.*\\.html|.*\\.png|.*\\.css|.*\\.map)$\") numberOfAnswers = 4 random.seed(7) def root_dir():", "r WHERE r.creator=%s\", [user_id]) response = [] for val in", "fileChecker = re.compile(r\"(.*\\.js|.*\\.html|.*\\.png|.*\\.css|.*\\.map)$\") numberOfAnswers = 4 random.seed(7) def root_dir(): #", "\"no JSON found\"}) else: room_id = json_data[\"room_id\"] questions = json_data[\"question\"]", "i in range(min(numberOfAnswers - 1, len(other_users))): (answer_id, text_id) = db.get_answer(question_id[0],", "answers.append((answer_id, text_id)) # if commented the first answer will be", "room (creator) VALUES (%s)\", [user_id]) return json.dumps({\"id\": room_id}) @app.route('/get_rooms') def", "request.args.get('room_id') db.exec_query(\"UPDATE room r SET r.status='finished' WHERE r.id = %s\",", "r SET r.status='closed' WHERE r.id = %s\", [room_id]) return json.dumps({\"info\":", "response = [] for val in values: response.append({\"id\": val[0], \"text\":", "= db.exec_query(\"INSERT INTO room (creator) VALUES (%s)\", [user_id]) return json.dumps({\"id\":", "0 and 0 < len(asked_about_id): quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0])", "'static'), filename) abort(403) @app.route('/register') def register(): # To obtain the", "email = request.args.get('email') user_id = db.register_or_get_email(email) db.exec_query(\"REPLACE INTO room_members (room_id,", "asked_user_id = %s AND about_user_id = %s AND question_id =", "\"status\": \"closed\"}) @app.route('/finish_room') def finish_room(): room_id = request.args.get('room_id') db.exec_query(\"UPDATE room", "a[\"text\"])) print(values[len(values) - 1]) db.exec_many_query(\"INSERT INTO answer (question_id, user_id, answer)", "FROM Room WHERE id = 1 values = db.exec_query(\"SELECT status", "user_name = value[0][0] question_text = \"What did %s answer to", "id = %s\", [answer_id, quiz_question_id]) answers.append((answer_id, text_id)) if min(numberOfAnswers -", "u.id) \" \"INNER JOIN room_members rm ON (u.id = rm.user_id)", "get_user_id(): email = request.args.get('email') id_user = db.register_or_get_email(email) return json.dumps({\"id\": id_user})", "%s\", [answer_id, quiz_question_id]) answers.append((answer_id, text_id)) if min(numberOfAnswers - 1, len(other_users))", "user_id = db.register_or_get_email(email) db.exec_query(\"REPLACE INTO room_members (room_id, user_id) VALUES (%s,%s)\",", "user_id = request.args.get('user_id') values = db.exec_query(\"SELECT a.id \" \"FROM answer", "a in json_data[\"answers\"]: values.append((a[\"id\"], user_id, a[\"text\"])) print(values[len(values) - 1]) db.exec_many_query(\"INSERT", "JSON found\"}) else: room_id = json_data[\"room_id\"] questions = json_data[\"question\"] for", "VALUES (%s, 
%s)\", [room_id, q]) return json.dumps({\"info\": \"Data received\"}) @app.route('/open_room')", "possible_questions = db.get_non_answered_questions(room_id, user_id) possible_users_to_ask = db.get_non_answered_people(room_id, user_id) question_id =", "flask_run import datab.social_database as db app = Flask(__name__) # Regular", "\"WHERE u.id=%s\", [asked_about_id[0]]) user_name = value[0][0] question_text = \"What did", "r SET r.status='started' WHERE r.id = %s\", [room_id]) return json.dumps({\"info\":", "@app.route('/finish_room') def finish_room(): room_id = request.args.get('room_id') db.exec_query(\"UPDATE room r SET", "asked_about_id[0]) random.shuffle(other_users) answers = [] (answer_id, text_id) = db.get_answer(question_id[0], asked_about_id[0])", "room has been closed successfully\", \"status\": \"closed\"}) @app.route('/finish_room') def finish_room():", "len(other_users))): (answer_id, text_id) = db.get_answer(question_id[0], other_users[i]) answers.append((answer_id, text_id)) # if", "from flask import Flask, send_from_directory from flask import request, abort", "% (user_name, question_text) return json.dumps({ \"id\": quiz_question_id, \"question\": question_text, \"answers\":", "else: possible_users_to_ask = db.get_all_different_people(room_id, user_id) if len(possible_questions) > 0: asked_about_id", "no cover return os.path.abspath(os.path.dirname(__file__)) @app.route('/') def root(): return index(\"index2.html\") @app.route('/<path:filename>')", "db.exec_query(\"SELECT q.question \" \"FROM question q \" \"WHERE q.id =", "Room r INNER JOIN values = db.exec_query(\"SELECT u.email , COUNT(qq.id)", "a.user_id= %s\", [room_id, user_id]) return json.dumps({\"answered\": len(values) > 0}) @app.route('/get_user_id')", "room_id = request.args.get('room_id') print(room_id) db.exec_query(\"UPDATE room r SET r.status='started' WHERE", "u.email , COUNT(qq.id) \" \"FROM quiz_question qq \" \"INNER JOIN", "(%s, %s)\", [room_id, q]) return json.dumps({\"info\": \"Data received\"}) @app.route('/open_room') def", "index(filename): if fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(), 'static'), filename) abort(403) @app.route('/register') def", "%s\", [room_id]) # for # SELECT id, COUNT(a.id), COUNT(a.id) FROM", "None: return json.dumps({\"error\": \"no JSON found\"}), 404 user_id = json_data[\"user_id\"]", "< len(asked_about_id): quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0]) other_users = db.get_all_different_people(room_id,", "did %s answer to '%s' ?\" % (user_name, question_text) return", "fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(), 'static'), filename) abort(403) @app.route('/register') def register(): #", "[room_id]) return json.dumps({\"info\": \"The room has been closed successfully\", \"status\":", "questions = json_data[\"question\"] for q in questions: db.exec_query(\"INSERT INTO question", "user_id) question_id = [] asked_about_id = [] if len(possible_questions) >", "a.question_id = q.id AND q.room_id = %s AND a.user_id= %s\",", "has been closed successfully\", \"status\": \"closed\"}) @app.route('/finish_room') def finish_room(): room_id", "quiz_question_id, \"question\": question_text, \"answers\": answer_json }) else: return json.dumps({\"error\": \"Not", "\" \"FROM question q \" \"WHERE q.id = %s\", [question_id[0]])", "%s\", [room_id, user_id]) return json.dumps({\"answered\": len(values) > 0}) @app.route('/get_user_id') def", "{\"id\": correct_answer_id, 
\"text\": text} }) if __name__ == '__main__': flask_run(app)", "found\"}) else: room_id = json_data[\"room_id\"] questions = json_data[\"question\"] for q", "'%s' ?\" % (user_name, question_text) return json.dumps({ \"id\": quiz_question_id, \"question\":", "FROM answer a WHERE a.id = %s \", [correct_answer_id]) if", "os.path.abspath(os.path.dirname(__file__)) @app.route('/') def root(): return index(\"index2.html\") @app.route('/<path:filename>') def index(filename): if", "possible_questions = db.get_all_questions(room_id) if len(possible_questions) > 0: question_id = random.sample(possible_questions,", "\"answers\": answer_json }) else: return json.dumps({\"error\": \"Not available questions for", "json.dumps({\"error\": \"no JSON found\"}) else: room_id = json_data[\"room_id\"] questions =", "= request.args.get('room_id') email = request.args.get('email') user_id = db.register_or_get_email(email) db.exec_query(\"REPLACE INTO", "the correct one random.shuffle(answers) answer_json = [] for (answer_id, text_id)", "0 < len(asked_about_id): quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0]) other_users =", "id = %s\", [quiz_answer_id, quiz_question_id]) value = db.exec_query(\"SELECT qq.answered_id, qq.correct_answer_id,", "= %s AND a.user_id= %s\", [room_id, user_id]) return json.dumps({\"answered\": len(values)", "[user_id, asked_about_id[0], question_id[0]]) quiz_question_id = value[0][0] value = db.exec_query(\"SELECT q.question", "def fill_room(): json_data = request.get_json() if json_data is None: return", "= int(request.args.get('room_id')) user_id = int(request.args.get('user_id')) possible_questions = db.get_non_answered_questions(room_id, user_id) possible_users_to_ask", "return index(\"index2.html\") @app.route('/<path:filename>') def index(filename): if fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(), 'static'),", "and 0 < len(asked_about_id): quiz_question_id = db.insert_quiz_question(user_id, asked_about_id[0], question_id[0]) other_users", "other_users[i]) answers.append((answer_id, text_id)) # if commented the first answer will", "q.id = %s\", [question_id[0]]) question_text = value[0][0] value = db.exec_query(\"SELECT", "a.answer FROM answer a WHERE a.id = %s \", [correct_answer_id])", "1) if len(question_id) > 0 and 0 < len(asked_about_id): quiz_question_id", "json.dumps({\"answered\": len(values) > 0}) @app.route('/get_user_id') def get_user_id(): email = request.args.get('email')", "= %s\", [room_id]) return json.dumps({\"info\": \"The room has been closed", "root(): return index(\"index2.html\") @app.route('/<path:filename>') def index(filename): if fileChecker.match(filename): return send_from_directory(os.path.join(root_dir(),", "= db.exec_query(\"SELECT a.answer FROM answer a WHERE a.id = %s", "@app.route('/') def root(): return index(\"index2.html\") @app.route('/<path:filename>') def index(filename): if fileChecker.match(filename):", "BY u.email \" \"ORDER BY COUNT(qq.id) DESC\", [room_id]) ranking =", "def get_rooms(): user_id = request.args.get('user_id') values = db.exec_query(\"SELECT r.id, r.status", "value = db.exec_query(\"SELECT id \" \"FROM quiz_question \" \"WHERE asked_user_id", "(creator) VALUES (%s)\", [user_id]) return json.dumps({\"id\": room_id}) @app.route('/get_rooms') def get_rooms():", "q.id AND q.room_id = %s AND a.user_id= %s\", [room_id, user_id])", "WHERE room_id = %s\", [room_id]) response = [] for val", "= request.args.get('user_id') values = db.exec_query(\"SELECT a.id \" 
\"FROM answer a", "json.dumps(response) @app.route('/fill_room', methods=['POST']) def fill_room(): json_data = request.get_json() if json_data", "quiz_question SET correct_answer_id=%s WHERE id = %s\", [answer_id, quiz_question_id]) answers.append((answer_id," ]
[ "\"\"\"Snapshotter keeps track of files on disk, and their hashes.", "changes = 0 for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path", "error, we rather handle it here than leave # exceptions", "snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile:", "change, which may require subsequent incremential snapshot and-or ignoring the", "started twice at # same time. While it is technically", "return True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) #", "in self.hexdigest_to_snapshotfiles.items() if sf ] def get_snapshot_state(self): assert self.lock.locked() return", "FileExistsError: # This happens only if snapshot is started twice", "not even work. # Then, create/update corresponding snapshotfile objects (old", "import os import threading logger = logging.getLogger(__name__) _hash = hashlib.blake2s", "from multiple threads, and the single-operation-only mode of operation is", "same, old_snapshotfile, relative_path) continue yield snapshotfile def get_snapshot_hashes(self): assert self.lock.locked()", "snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path = self.src / relative_path", "sorted(dirs), files def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None)", "*, src_dirs, dst_dirs): changes = 0 for i, relative_dir in", "lost += 1 if increase_worth_reporting(lost): logger.debug(\"#%d. lost - %s disappeared", "hash_hexdigest_readable(f, *, read_buffer=1_000_000): h = _hash() while True: data =", "<= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return", "SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes = 0", "and their hashes. The hash on disk MAY change, which", "try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError: # This happens only", "# same time. While it is technically speaking upstream #", "if self.src == self.dst: # The src=dst mode should be", "import threading logger = logging.getLogger(__name__) _hash = hashlib.blake2s def hash_hexdigest_readable(f,", "result_callback=_result_cb, n=self.parallel) # We initially started with 1 extra progress.add_success()", "else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile def _result_cb(*, map_in, map_out):", "while True: data = f.read(read_buffer) if not data: break h.update(data)", "st = src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths):", "contents as-is. 
dst_dirs, dst_files = src_dirs, src_files else: progress.add_total(3) dst_dirs,", "relative_path in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path)", "map_out): self._add_snapshotfile(map_out) progress.add_success() return True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb,", "_cb(snapshotfile): # src may or may not be present; dst", "is not exactly flawless (the 'new operation can be started", "be made with snapshotter.lock held. This is because Snapshotter is", "to be built-in to Snapshotter, but having it there enables", "new directory: %r\", i, relative_dir) changes += 1 return changes", "not disappear between snapshot and # upload steps (e.g. Astacus", "# upload steps (e.g. Astacus controls the lifecycle of the", "mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths): same = 0 lost =", "True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) # We", "class Snapshotter: \"\"\"Snapshotter keeps track of files on disk, and", "object. Note that any call to public API MUST be", "import increase_worth_reporting, Progress from pathlib import Path from typing import", "{p.parent for p in files} return sorted(dirs), files def _add_snapshotfile(self,", "self.hexdigest_to_snapshotfiles = {} self.parallel = parallel self.lock = threading.Lock() def", "%s disappeared before stat, ignoring\", lost, self.src / relative_path) continue", "== snapshotfile: same += 1 if increase_worth_reporting(same): logger.debug(\"#%d. same -", "# Remove extra files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() #", "Path(src) self.dst = Path(dst) self.globs = globs self.relative_path_to_snapshotfile = {}", "can be started with old running' is intentional feature but", "for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf ] def get_snapshot_state(self):", "old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost", "== self.dst: # The src=dst mode should be used if", "f.read(read_buffer) if not data: break h.update(data) return h.hexdigest() class Snapshotter:", "self.src / relative_path) continue if old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64", "+= 1 return changes def _snapshot_add_missing_files(self, *, src_files, dst_files): existing", "\"\"\" from astacus.common import magic, utils from astacus.common.ipc import SnapshotFile,", "src may or may not be present; dst is present", "that is shared across operations, possibly used from multiple threads,", "details \"\"\" from astacus.common import magic, utils from astacus.common.ipc import", "+= 1 return changes def snapshot(self, *, progress: Optional[Progress] =", "track of files on disk, and their hashes. The hash", "be present; dst is present as it is in snapshot", "def _list_files(self, basepath: Path): result_files = set() for glob in", "The src=dst mode should be used if and only if", "and we can just use the src # directory contents", "ignoring the files which have changed. 
The output to outside", "relative_path) changes += 1 return changes def snapshot(self, *, progress:", "_snapshotfile_from_path(self, relative_path): src_path = self.src / relative_path st = src_path.stat()", "= 0 disappeared = 0 changes = 0 for i,", "1 return changes def snapshot(self, *, progress: Optional[Progress] = None):", "directory contents as-is. dst_dirs, dst_files = src_dirs, src_files else: progress.add_total(3)", "in relpath.parents: if parent.name == magic.ASTACUS_TMPDIR: break else: result_files.add(relpath) return", "disk MAY change, which may require subsequent incremential snapshot and-or", "src_path = self.src / relative_path st = src_path.stat() return SnapshotFile(relative_path=relative_path,", "changes += 1 return changes def _snapshot_add_missing_files(self, *, src_files, dst_files):", "snapshotter.lock held. This is because Snapshotter is process-wide utility that", "def _cb(snapshotfile): # src may or may not be present;", "Path(dst) self.globs = globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles = {}", "= self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost +=", "self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. extra file: %r\", i, relative_path)", "Path from typing import Optional import base64 import hashlib import", "is process-wide utility that is shared across operations, possibly used", "0 for i, relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path = self.src", "return changes def _snapshot_add_missing_files(self, *, src_files, dst_files): existing = 0", "in # making extra symlinks and we can just use", "import magic, utils from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from", "model has empty; either plugin or configuration must supply them", "it is # known that files will not disappear between", "parallel): assert globs # model has empty; either plugin or", "self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self,", "changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) # We initially", "try: snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost += 1 if", "Note that any call to public API MUST be made", "+= 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. %s disappeared before linking, ignoring\",", "incremential snapshot and-or ignoring the files which have changed. The", "Remove extra files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() # Add", "path.is_file() or path.is_symlink(): continue relpath = path.relative_to(basepath) for parent in", "within the file list contained in root object. Note that", "= self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success() # Remove extra files changes +=", "snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode()", "FileNotFoundError: disappeared += 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. 
%s disappeared before", "return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths): same = 0", "relative_path dst_path = self.dst / relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False)", "= hash_hexdigest_readable(f) return snapshotfile def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success()", "self.lock = threading.Lock() def _list_files(self, basepath: Path): result_files = set()", "from pathlib import Path from typing import Optional import base64", "list contained in root object. Note that any call to", "subsequent incremential snapshot and-or ignoring the files which have changed.", "might not need to be built-in to Snapshotter, but having", "ignoring\", disappeared, src_path) continue if increase_worth_reporting(i - disappeared): logger.debug(\"#%d. new", "happens only if snapshot is started twice at # same", "of operation is not exactly flawless (the 'new operation can", "dst_files=dst_files) progress.add_success() # Add missing files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files)", "changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() # Add missing files changes", "there is little point in # making extra symlinks and", "data = f.read(read_buffer) if not data: break h.update(data) return h.hexdigest()", "'new operation can be started with old running' is intentional", "= self._list_dirs_and_files(self.dst) # Create missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs)", "might not even work. # Then, create/update corresponding snapshotfile objects", "This happens only if snapshot is started twice at #", "0 for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path = self.dst", "is not # probably really worth it and due to", "dst_files): changes = 0 for i, relative_path in enumerate(set(dst_files).difference(src_files), 1):", "if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest =", "= self.src / relative_path st = src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns,", "lost - %s disappeared before stat, ignoring\", lost, self.src /", "f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest", "self.hexdigest_to_snapshotfiles.items() if sf ] def get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs,", "assert self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in", "- %s disappeared before stat, ignoring\", lost, self.src / relative_path)", "_snapshot_add_missing_files(self, *, src_files, dst_files): existing = 0 disappeared = 0", "def _get_snapshot_hash_list(self, relative_paths): same = 0 lost = 0 for", "typing import Optional import base64 import hashlib import logging import", "ignoring\", lost, self.src / relative_path) continue if old_snapshotfile: snapshotfile.hexdigest =", "dirs = {p.parent for p in files} return sorted(dirs), files", "= self.src / relative_path dst_path = self.dst / relative_path try:", "snapshot(self, *, progress: Optional[Progress] = None): assert self.lock.locked() if progress", "controls the lifecycle of the # files within). 
In that", "progress.add_success() # Remove extra files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success()", "which may require subsequent incremential snapshot and-or ignoring the files", "Path): result_files = set() for glob in self.globs: for path", "self.dst = Path(dst) self.globs = globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles", "at # same time. While it is technically speaking upstream", "increase_worth_reporting, Progress from pathlib import Path from typing import Optional", "import logging import os import threading logger = logging.getLogger(__name__) _hash", "del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path =", "dst_path = self.dst / relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d.", "self.dst / relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError: #", "if increase_worth_reporting(disappeared): logger.debug(\"#%d. %s disappeared before linking, ignoring\", disappeared, src_path)", "(old # ones were already removed) dst_dirs, dst_files = self._list_dirs_and_files(self.dst)", "directory: %r\", i, relative_dir) changes += 1 return changes def", "feature but new operation should eventually replace the old). The", "only if it is # known that files will not", "relative_paths): same = 0 lost = 0 for relative_path in", "must supply them self.src = Path(src) self.dst = Path(dst) self.globs", "import Path from typing import Optional import base64 import hashlib", "def _snapshotfile_from_path(self, relative_path): src_path = self.src / relative_path st =", "result_files = set() for glob in self.globs: for path in", "# ones were already removed) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) snapshotfiles", "SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf ]", "= self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost += 1 if increase_worth_reporting(lost): logger.debug(\"#%d.", "if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path]", "+= len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) # We initially started", "extra directories, but it is not # probably really worth", "h.update(data) return h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps track of files", "if snapshot is started twice at # same time. While", "mode should be used if and only if it is", "if increase_worth_reporting(i - disappeared): logger.debug(\"#%d. new file: %r\", i -", "self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. 
extra file:", "def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h = _hash() while True: data", "existed, ignoring\", existing, src_path) continue except FileNotFoundError: disappeared += 1", "snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out)", "as it is in snapshot with snapshotfile.open_for_reading(self.dst) as f: if", "return changes def _snapshot_remove_extra_files(self, *, src_files, dst_files): changes = 0", "progress.add_success() # Add missing files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success()", "astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting, Progress", "new operation should eventually replace the old). The lock itself", "/ relative_path st = src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def", "Aiven Ltd See LICENSE for details \"\"\" from astacus.common import", "0 changes = 0 for i, relative_path in enumerate(set(src_files).difference(dst_files), 1):", "old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile: same +=", "for i, relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst /", "dst_dirs): changes = 0 for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1):", "= self._list_dirs_and_files(self.src) progress.start(1) if self.src == self.dst: # The src=dst", "remove extra directories, but it is not # probably really", "map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles,", "changes += 1 return changes def snapshot(self, *, progress: Optional[Progress]", "glob in self.globs: for path in basepath.glob(glob): if not path.is_file()", "and # upload steps (e.g. Astacus controls the lifecycle of", "ones were already removed) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) snapshotfiles =", "linking, ignoring\", disappeared, src_path) continue if increase_worth_reporting(i - disappeared): logger.debug(\"#%d.", "self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles = {} self.parallel = parallel self.lock", "self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile:", "utility that is shared across operations, possibly used from multiple", "follow_symlinks=False) except FileExistsError: # This happens only if snapshot is", "API MUST be made with snapshotter.lock held. This is because", "existing, src_path) continue except FileNotFoundError: disappeared += 1 if increase_worth_reporting(disappeared):", "call to public API MUST be made with snapshotter.lock held.", "except FileNotFoundError: disappeared += 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. 
%s disappeared", "missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success() # Remove extra", "not need to be built-in to Snapshotter, but having it", "not path.is_file() or path.is_symlink(): continue relpath = path.relative_to(basepath) for parent", "speaking upstream # error, we rather handle it here than", "self.lock.locked() if progress is None: progress = Progress() src_dirs, src_files", "is just root object's hash, as well as list of", "may or may not be present; dst is present as", "in files} return sorted(dirs), files def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile", "plugin or configuration must supply them self.src = Path(src) self.dst", "little point in # making extra symlinks and we can", "or may not be present; dst is present as it", "relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst / relative_path snapshotfile", "globs # model has empty; either plugin or configuration must", "during public API calls. \"\"\" def __init__(self, *, src, dst,", "extra files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() # Add missing", "Optional import base64 import hashlib import logging import os import", "hash, as well as list of other hashes which correspond", "started with old running' is intentional feature but new operation", "SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile)", "is because Snapshotter is process-wide utility that is shared across", "only if snapshot is started twice at # same time.", "dst_dirs, dst_files = self._list_dirs_and_files(self.dst) # Create missing directories changes =", "_snapshot_remove_extra_files(self, *, src_files, dst_files): changes = 0 for i, relative_path", "files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() # Add missing files", "worth it and due to ignored files it # actually", "held. This is because Snapshotter is process-wide utility that is", "snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d.", "1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. %s disappeared before linking, ignoring\", disappeared,", "break h.update(data) return h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps track of", "in basepath.glob(glob): if not path.is_file() or path.is_symlink(): continue relpath =", "file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths): same = 0 lost = 0", "snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. extra file: %r\", i,", "src_path) continue except FileNotFoundError: disappeared += 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d.", "*, progress: Optional[Progress] = None): assert self.lock.locked() if progress is", "files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We COULD also", "# We COULD also remove extra directories, but it is", "it and due to ignored files it # actually might", "of the # files within). 
In that case, there is", "*, read_buffer=1_000_000): h = _hash() while True: data = f.read(read_buffer)", "progress: Optional[Progress] = None): assert self.lock.locked() if progress is None:", "symlinks and we can just use the src # directory", "disappeared, relative_path) changes += 1 return changes def snapshot(self, *,", "we can just use the src # directory contents as-is.", "= None): assert self.lock.locked() if progress is None: progress =", "*, src_files, dst_files): changes = 0 for i, relative_path in", "disappeared += 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. %s disappeared before linking,", "assert globs # model has empty; either plugin or configuration", "on disk MAY change, which may require subsequent incremential snapshot", "for details \"\"\" from astacus.common import magic, utils from astacus.common.ipc", "possibly used from multiple threads, and the single-operation-only mode of", "The output to outside is just root object's hash, as", "be started with old running' is intentional feature but new", "basepath.glob(glob): if not path.is_file() or path.is_symlink(): continue relpath = path.relative_to(basepath)", "dig, sf in self.hexdigest_to_snapshotfiles.items() if sf ] def get_snapshot_state(self): assert", "handled. existing += 1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed,", "disappeared, src_path) continue if increase_worth_reporting(i - disappeared): logger.debug(\"#%d. new file:", "public API MUST be made with snapshotter.lock held. This is", "1 return changes def _snapshot_remove_extra_files(self, *, src_files, dst_files): changes =", "= self.dst / relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile)", "# This happens only if snapshot is started twice at", "# probably really worth it and due to ignored files", "across operations, possibly used from multiple threads, and the single-operation-only", "self.src / relative_path dst_path = self.dst / relative_path try: os.link(src=src_path,", "process-wide utility that is shared across operations, possibly used from", "the lifecycle of the # files within). In that case,", "if increase_worth_reporting(lost): logger.debug(\"#%d. lost - %s disappeared before stat, ignoring\",", "magic, utils from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress", "twice at # same time. While it is technically speaking", "FileNotFoundError: lost += 1 if increase_worth_reporting(lost): logger.debug(\"#%d. lost - %s", "exactly flawless (the 'new operation can be started with old", "return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes =", "MUST be made with snapshotter.lock held. This is because Snapshotter", "if old_snapshotfile == snapshotfile: same += 1 if increase_worth_reporting(same): logger.debug(\"#%d.", "for p in files} return sorted(dirs), files def _add_snapshotfile(self, snapshotfile:", "operation is not exactly flawless (the 'new operation can be", "= set() for glob in self.globs: for path in basepath.glob(glob):", "already existed, ignoring\", existing, src_path) continue except FileNotFoundError: disappeared +=", "same += 1 if increase_worth_reporting(same): logger.debug(\"#%d. 
same - %r in", "that any call to public API MUST be made with", "SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting, Progress from pathlib", "known that files will not disappear between snapshot and #", "making extra symlinks and we can just use the src", "0 for relative_path in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile", "# actually might not even work. # Then, create/update corresponding", "if increase_worth_reporting(i): logger.debug(\"#%d. extra file: %r\", i, relative_path) changes +=", "# We initially started with 1 extra progress.add_success() return changes", "result_files.add(relpath) return sorted(result_files) def _list_dirs_and_files(self, basepath: Path): files = self._list_files(basepath)", "its state during public API calls. \"\"\" def __init__(self, *,", "existing = 0 disappeared = 0 changes = 0 for", "= self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None) if old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if", "dst_path = self.dst / relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile:", "create/update corresponding snapshotfile objects (old # ones were already removed)", "LICENSE for details \"\"\" from astacus.common import magic, utils from", "= hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h = _hash() while", "is # known that files will not disappear between snapshot", "hash_hexdigest_readable(f) return snapshotfile def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return", "Snapshotter: \"\"\"Snapshotter keeps track of files on disk, and their", "(the 'new operation can be started with old running' is", "changes += 1 return changes def _snapshot_remove_extra_files(self, *, src_files, dst_files):", "%r\", i, relative_path) changes += 1 return changes def _snapshot_add_missing_files(self,", "The lock itself might not need to be built-in to", "enables asserting its state during public API calls. \"\"\" def", "either plugin or configuration must supply them self.src = Path(src)", "if parent.name == magic.ASTACUS_TMPDIR: break else: result_files.add(relpath) return sorted(result_files) def", "Optional[Progress] = None): assert self.lock.locked() if progress is None: progress", "corresponding snapshotfile objects (old # ones were already removed) dst_dirs,", "snapshot with snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64", "files which have changed. The output to outside is just", "calls. \"\"\" def __init__(self, *, src, dst, globs, parallel): assert", "really worth it and due to ignored files it #", "# Add missing files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() #", "in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path) except", "dst_files): existing = 0 disappeared = 0 changes = 0", "_result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return True changes += len(snapshotfiles)", "if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. 
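# A minimal usage sketch (not from the original module): the helper only
# assumes an object with a read(size) method returning bytes, so an
# io.BytesIO works just as well as a real file opened in binary mode:
#
#     import io
#     hash_hexdigest_readable(io.BytesIO(b"example content"))
#     # -> blake2s hexdigest of b"example content"
#
#     with open("/tmp/example.bin", "rb") as f:  # hypothetical path
#         digest = hash_hexdigest_readable(f)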
extra file: %r\",", "+= self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We COULD also remove extra", "# Create missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success() #", "import hashlib import logging import os import threading logger =", "relpath.parents: if parent.name == magic.ASTACUS_TMPDIR: break else: result_files.add(relpath) return sorted(result_files)", "it # actually might not even work. # Then, create/update", "progress.add_success() return True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel)", "dst_files=dst_files) progress.add_success() # We COULD also remove extra directories, but", "1): dst_path = self.dst / relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i):", "read_buffer=1_000_000): h = _hash() while True: data = f.read(read_buffer) if", "hash on disk MAY change, which may require subsequent incremential", "self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self,", "upload steps (e.g. Astacus controls the lifecycle of the #", "their hashes. The hash on disk MAY change, which may", "for relative_path in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile =", "return changes def snapshot(self, *, progress: Optional[Progress] = None): assert", "for i, relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path = self.src /", "src_files else: progress.add_total(3) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) # Create missing", "%r in %s is same\", same, old_snapshotfile, relative_path) continue yield", "is same\", same, old_snapshotfile, relative_path) continue yield snapshotfile def get_snapshot_hashes(self):", "path.relative_to(basepath) for parent in relpath.parents: if parent.name == magic.ASTACUS_TMPDIR: break", "self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path = self.src", "same = 0 lost = 0 for relative_path in relative_paths:", "# Then, create/update corresponding snapshotfile objects (old # ones were", "dst=dst_path, follow_symlinks=False) except FileExistsError: # This happens only if snapshot", "= self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. 
extra", "threading.Lock() def _list_files(self, basepath: Path): result_files = set() for glob", "\"\"\" Copyright (c) 2020 Aiven Ltd See LICENSE for details", "= Path(dst) self.globs = globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles =", "is intentional feature but new operation should eventually replace the", "snapshotfile objects (old # ones were already removed) dst_dirs, dst_files", "relpath = path.relative_to(basepath) for parent in relpath.parents: if parent.name ==", "changes = 0 for i, relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path", "base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile def _result_cb(*, map_in,", "same - %r in %s is same\", same, old_snapshotfile, relative_path)", "dst_files = self._list_dirs_and_files(self.dst) # Create missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs,", "replace the old). The lock itself might not need to", "if increase_worth_reporting(same): logger.debug(\"#%d. same - %r in %s is same\",", "be built-in to Snapshotter, but having it there enables asserting", "enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst / relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)", "globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles = {} self.parallel = parallel", "path in basepath.glob(glob): if not path.is_file() or path.is_symlink(): continue relpath", "# making extra symlinks and we can just use the", "old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile: same += 1 if increase_worth_reporting(same):", "in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path = self.dst / relative_dir dst_path.mkdir(parents=True, exist_ok=True)", "self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We COULD also remove extra directories,", "as f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else:", "See LICENSE for details \"\"\" from astacus.common import magic, utils", "same\", same, old_snapshotfile, relative_path) continue yield snapshotfile def get_snapshot_hashes(self): assert", "to Snapshotter, but having it there enables asserting its state", "logger.debug(\"#%d. 
%s disappeared before linking, ignoring\", disappeared, src_path) continue if", "relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if increase_worth_reporting(i):", "= logging.getLogger(__name__) _hash = hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h", "intentional feature but new operation should eventually replace the old).", "new file: %r\", i - disappeared, relative_path) changes += 1", "it is in snapshot with snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size", "as well as list of other hashes which correspond to", "self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile", "if old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile)", "handle it here than leave # exceptions not handled. existing", "and-or ignoring the files which have changed. The output to", "sorted(result_files) def _list_dirs_and_files(self, basepath: Path): files = self._list_files(basepath) dirs =", "logging.getLogger(__name__) _hash = hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h =", "src_files = self._list_dirs_and_files(self.src) progress.start(1) if self.src == self.dst: # The", "/ relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError: # This", "extra symlinks and we can just use the src #", "old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile ==", "lost = 0 for relative_path in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)", "progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): # src may or may not be", "= Path(src) self.dst = Path(dst) self.globs = globs self.relative_path_to_snapshotfile =", "which have changed. The output to outside is just root", "increase_worth_reporting(lost): logger.debug(\"#%d. lost - %s disappeared before stat, ignoring\", lost,", "hashes which correspond to files referred to within the file", "because Snapshotter is process-wide utility that is shared across operations,", "the single-operation-only mode of operation is not exactly flawless (the", "src_dirs, src_files else: progress.add_total(3) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) # Create", "1): dst_path = self.dst / relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if", "were already removed) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) snapshotfiles = list(self._get_snapshot_hash_list(dst_files))", "src_files, dst_files): changes = 0 for i, relative_path in enumerate(set(dst_files).difference(src_files),", "dst_dirs=dst_dirs) progress.add_success() # Remove extra files changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files)", "assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs):", "use the src # directory contents as-is. 
dst_dirs, dst_files =", "files on disk, and their hashes. The hash on disk", "# error, we rather handle it here than leave #", "_list_files(self, basepath: Path): result_files = set() for glob in self.globs:", "= 0 changes = 0 for i, relative_path in enumerate(set(src_files).difference(dst_files),", "but it is not # probably really worth it and", "should eventually replace the old). The lock itself might not", "leave # exceptions not handled. existing += 1 if increase_worth_reporting(existing):", "not # probably really worth it and due to ignored", "and only if it is # known that files will", "\"\"\" def __init__(self, *, src, dst, globs, parallel): assert globs", "configuration must supply them self.src = Path(src) self.dst = Path(dst)", "/ relative_path) continue if old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 =", "self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None) if old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest:", "/ relative_path dst_path = self.dst / relative_path try: os.link(src=src_path, dst=dst_path,", "changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We COULD also remove", "disappeared before linking, ignoring\", disappeared, src_path) continue if increase_worth_reporting(i -", "None: progress = Progress() src_dirs, src_files = self._list_dirs_and_files(self.src) progress.start(1) if", "due to ignored files it # actually might not even", "- %r in %s is same\", same, old_snapshotfile, relative_path) continue", "def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return True changes +=", "snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert", "if it is # known that files will not disappear", "*, src, dst, globs, parallel): assert globs # model has", "self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success() # Remove extra files changes += self._snapshot_remove_extra_files(src_files=src_files,", "_hash() while True: data = f.read(read_buffer) if not data: break", "= 0 for i, relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path =", "progress.start(1) if self.src == self.dst: # The src=dst mode should", "path.is_symlink(): continue relpath = path.relative_to(basepath) for parent in relpath.parents: if", "not handled. existing += 1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s already", "0 lost = 0 for relative_path in relative_paths: old_snapshotfile =", "if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path = self.src /", "increase_worth_reporting(i - disappeared): logger.debug(\"#%d. new file: %r\", i - disappeared,", "self._list_files(basepath) dirs = {p.parent for p in files} return sorted(dirs),", "0 disappeared = 0 changes = 0 for i, relative_path", "else: result_files.add(relpath) return sorted(result_files) def _list_dirs_and_files(self, basepath: Path): files =", "technically speaking upstream # error, we rather handle it here", "here than leave # exceptions not handled. 
existing += 1", "outside is just root object's hash, as well as list", "asserting its state during public API calls. \"\"\" def __init__(self,", "None): assert self.lock.locked() if progress is None: progress = Progress()", "case, there is little point in # making extra symlinks", "point in # making extra symlinks and we can just", "get_snapshot_hashes(self): assert self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf", "data: break h.update(data) return h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps track", "if progress is None: progress = Progress() src_dirs, src_files =", "hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h = _hash() while True:", "_hash = hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000): h = _hash()", "any call to public API MUST be made with snapshotter.lock", "assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def", "parent in relpath.parents: if parent.name == magic.ASTACUS_TMPDIR: break else: result_files.add(relpath)", "True: data = f.read(read_buffer) if not data: break h.update(data) return", "enumerate(set(src_files).difference(dst_files), 1): src_path = self.src / relative_path dst_path = self.dst", "# model has empty; either plugin or configuration must supply", "basepath: Path): result_files = set() for glob in self.globs: for", "yield snapshotfile def get_snapshot_hashes(self): assert self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size)", "be used if and only if it is # known", "%r\", i, relative_dir) changes += 1 return changes def _snapshot_remove_extra_files(self,", "logger.debug(\"#%d. same - %r in %s is same\", same, old_snapshotfile,", "just root object's hash, as well as list of other", "= 0 for i, relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path =", "logger = logging.getLogger(__name__) _hash = hashlib.blake2s def hash_hexdigest_readable(f, *, read_buffer=1_000_000):", "return sorted(result_files) def _list_dirs_and_files(self, basepath: Path): files = self._list_files(basepath) dirs", "dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. new directory: %r\", i, relative_dir)", "self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes", "continue relpath = path.relative_to(basepath) for parent in relpath.parents: if parent.name", "of files on disk, and their hashes. The hash on", "eventually replace the old). 
The lock itself might not need", "not data: break h.update(data) return h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps", "basepath: Path): files = self._list_files(basepath) dirs = {p.parent for p", "self.src == self.dst: # The src=dst mode should be used", "import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting, Progress from", "self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path = self.src / relative_path st", "relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError:", "disappeared before stat, ignoring\", lost, self.src / relative_path) continue if", "not be present; dst is present as it is in", "it is technically speaking upstream # error, we rather handle", "files will not disappear between snapshot and # upload steps", "= f.read(read_buffer) if not data: break h.update(data) return h.hexdigest() class", "with old running' is intentional feature but new operation should", "len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) # We initially started with", "def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes = 0 for i,", "self.parallel = parallel self.lock = threading.Lock() def _list_files(self, basepath: Path):", "+= 1 if increase_worth_reporting(same): logger.debug(\"#%d. same - %r in %s", "work. # Then, create/update corresponding snapshotfile objects (old # ones", "= src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths): same", "Add missing files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We", "%r\", i - disappeared, relative_path) changes += 1 return changes", "self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost += 1 if increase_worth_reporting(lost): logger.debug(\"#%d. lost", "import base64 import hashlib import logging import os import threading", "+= 1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed, ignoring\", existing,", "and the single-operation-only mode of operation is not exactly flawless", "magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile", "lifecycle of the # files within). In that case, there", "operations, possibly used from multiple threads, and the single-operation-only mode", "list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): # src may or may not", "= snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile):", "except FileNotFoundError: lost += 1 if increase_worth_reporting(lost): logger.debug(\"#%d. lost -", "= self._list_dirs_and_files(self.dst) snapshotfiles = list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): # src", "to within the file list contained in root object. 
Note", "relative_dir) changes += 1 return changes def _snapshot_remove_extra_files(self, *, src_files,", "self.dst / relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink()", "fun=_cb, result_callback=_result_cb, n=self.parallel) # We initially started with 1 extra", "is None: progress = Progress() src_dirs, src_files = self._list_dirs_and_files(self.src) progress.start(1)", "relative_path) continue if old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64", "[ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf", "for glob in self.globs: for path in basepath.glob(glob): if not", "the file list contained in root object. Note that any", "_remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if", "i, relative_path) changes += 1 return changes def _snapshot_add_missing_files(self, *,", "single-operation-only mode of operation is not exactly flawless (the 'new", "changes def snapshot(self, *, progress: Optional[Progress] = None): assert self.lock.locked()", "self.globs: for path in basepath.glob(glob): if not path.is_file() or path.is_symlink():", "disappeared): logger.debug(\"#%d. new file: %r\", i - disappeared, relative_path) changes", "empty; either plugin or configuration must supply them self.src =", "root object's hash, as well as list of other hashes", "ignoring\", existing, src_path) continue except FileNotFoundError: disappeared += 1 if", "disk, and their hashes. The hash on disk MAY change,", "upstream # error, we rather handle it here than leave", "used if and only if it is # known that", "= base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile def _result_cb(*,", "files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes = 0 for", "snapshot and # upload steps (e.g. 
Astacus controls the lifecycle", "is shared across operations, possibly used from multiple threads, and", "old_snapshotfile, relative_path) continue yield snapshotfile def get_snapshot_hashes(self): assert self.lock.locked() return", "parallel self.lock = threading.Lock() def _list_files(self, basepath: Path): result_files =", "present; dst is present as it is in snapshot with", "self.relative_path_to_snapshotfile.get(relative_path) try: snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost += 1", "else: progress.add_total(3) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) # Create missing directories", "return h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps track of files on", "self.globs = globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles = {} self.parallel", "parent.name == magic.ASTACUS_TMPDIR: break else: result_files.add(relpath) return sorted(result_files) def _list_dirs_and_files(self,", "This is because Snapshotter is process-wide utility that is shared", "self._list_dirs_and_files(self.src) progress.start(1) if self.src == self.dst: # The src=dst mode", "utils from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import", "self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files) progress.add_success() # Add missing files changes += self._snapshot_add_missing_files(src_files=src_files,", "relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. new directory: %r\", i,", "logging import os import threading logger = logging.getLogger(__name__) _hash =", "files} return sorted(dirs), files def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile =", "is technically speaking upstream # error, we rather handle it", "actually might not even work. # Then, create/update corresponding snapshotfile", "i, relative_dir) changes += 1 return changes def _snapshot_remove_extra_files(self, *,", "to public API MUST be made with snapshotter.lock held. This", "to outside is just root object's hash, as well as", "with snapshotter.lock held. This is because Snapshotter is process-wide utility", "self._list_dirs_and_files(self.dst) snapshotfiles = list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): # src may", "list of other hashes which correspond to files referred to", "same time. While it is technically speaking upstream # error,", "flawless (the 'new operation can be started with old running'", "files referred to within the file list contained in root", "directories, but it is not # probably really worth it", "return sorted(dirs), files def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path,", "lock itself might not need to be built-in to Snapshotter,", "but new operation should eventually replace the old). The lock", "if increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed, ignoring\", existing, src_path) continue", "# The src=dst mode should be used if and only", "as-is. dst_dirs, dst_files = src_dirs, src_files else: progress.add_total(3) dst_dirs, dst_files", "operation should eventually replace the old). The lock itself might", "API calls. 
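    # Illustrative note (an assumption added for clarity, not in the original
    # module): _add_snapshotfile/_remove_snapshotfile keep the two indexes in
    # sync, so deduplication falls out of the hexdigest index. Two identical
    # files end up as a single snapshot hash:
    #
    #     a = SnapshotFile(relative_path=Path("a"), mtime_ns=1, file_size=3, hexdigest="d1")
    #     b = SnapshotFile(relative_path=Path("b"), mtime_ns=1, file_size=3, hexdigest="d1")
    #     # after _add_snapshotfile(a); _add_snapshotfile(b):
    #     #   hexdigest_to_snapshotfiles == {"d1": [a, b]}
    #     #   get_snapshot_hashes() -> [SnapshotHash(hexdigest="d1", size=3)]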
\"\"\" def __init__(self, *, src, dst, globs, parallel):", "to files referred to within the file list contained in", "%s is same\", same, old_snapshotfile, relative_path) continue yield snapshotfile def", "{} self.hexdigest_to_snapshotfiles = {} self.parallel = parallel self.lock = threading.Lock()", "= self.dst / relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError:", "before stat, ignoring\", lost, self.src / relative_path) continue if old_snapshotfile:", "dst_path.unlink() if increase_worth_reporting(i): logger.debug(\"#%d. extra file: %r\", i, relative_path) changes", "if not path.is_file() or path.is_symlink(): continue relpath = path.relative_to(basepath) for", "continue yield snapshotfile def get_snapshot_hashes(self): assert self.lock.locked() return [ SnapshotHash(hexdigest=dig,", "continue except FileNotFoundError: disappeared += 1 if increase_worth_reporting(disappeared): logger.debug(\"#%d. %s", "= {} self.hexdigest_to_snapshotfiles = {} self.parallel = parallel self.lock =", "require subsequent incremential snapshot and-or ignoring the files which have", "changes = 0 for i, relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path", "in %s is same\", same, old_snapshotfile, relative_path) continue yield snapshotfile", "on disk, and their hashes. The hash on disk MAY", "if old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile", "src # directory contents as-is. dst_dirs, dst_files = src_dirs, src_files", "even work. # Then, create/update corresponding snapshotfile objects (old #", "relative_path st = src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self,", "exceptions not handled. existing += 1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s", "have changed. The output to outside is just root object's", "relative_path): src_path = self.src / relative_path st = src_path.stat() return", "# known that files will not disappear between snapshot and", "i, relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst / relative_path", "may not be present; dst is present as it is", "# directory contents as-is. dst_dirs, dst_files = src_dirs, src_files else:", "== magic.ASTACUS_TMPDIR: break else: result_files.add(relpath) return sorted(result_files) def _list_dirs_and_files(self, basepath:", "sf ] def get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def", "snapshot is started twice at # same time. 
While it", "for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path = self.dst /", "= 0 for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path =", "disappeared = 0 changes = 0 for i, relative_path in", "hashlib import logging import os import threading logger = logging.getLogger(__name__)", "correspond to files referred to within the file list contained", "return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items() if", "Snapshotter is process-wide utility that is shared across operations, possibly", "files = self._list_files(basepath) dirs = {p.parent for p in files}", "Path): files = self._list_files(basepath) dirs = {p.parent for p in", "progress is None: progress = Progress() src_dirs, src_files = self._list_dirs_and_files(self.src)", "also remove extra directories, but it is not # probably", "set() for glob in self.globs: for path in basepath.glob(glob): if", "files it # actually might not even work. # Then,", "%s already existed, ignoring\", existing, src_path) continue except FileNotFoundError: disappeared", "from astacus.common import magic, utils from astacus.common.ipc import SnapshotFile, SnapshotHash,", "file list contained in root object. Note that any call", "*, src_files, dst_files): existing = 0 disappeared = 0 changes", "n=self.parallel) # We initially started with 1 extra progress.add_success() return", "progress = Progress() src_dirs, src_files = self._list_dirs_and_files(self.src) progress.start(1) if self.src", "snapshotfile = self._snapshotfile_from_path(relative_path) except FileNotFoundError: lost += 1 if increase_worth_reporting(lost):", "is in snapshot with snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size <=", "object's hash, as well as list of other hashes which", "the files which have changed. The output to outside is", "old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def", "in snapshot with snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE:", "src_files, dst_files): existing = 0 disappeared = 0 changes =", "(c) 2020 Aiven Ltd See LICENSE for details \"\"\" from", "can just use the src # directory contents as-is. dst_dirs,", "increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed, ignoring\", existing, src_path) continue except", "def _snapshot_add_missing_files(self, *, src_files, dst_files): existing = 0 disappeared =", "is present as it is in snapshot with snapshotfile.open_for_reading(self.dst) as", "# src may or may not be present; dst is", "stat, ignoring\", lost, self.src / relative_path) continue if old_snapshotfile: snapshotfile.hexdigest", "in self.globs: for path in basepath.glob(glob): if not path.is_file() or", "dst_path = self.dst / relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except", "keeps track of files on disk, and their hashes. The", "self._add_snapshotfile(map_out) progress.add_success() return True changes += len(snapshotfiles) utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb,", "globs, parallel): assert globs # model has empty; either plugin", "within). 
In that case, there is little point in #", "i - disappeared, relative_path) changes += 1 return changes def", "relative_path try: os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError: # This happens", "if increase_worth_reporting(i): logger.debug(\"#%d. new directory: %r\", i, relative_dir) changes +=", "hashes. The hash on disk MAY change, which may require", "in enumerate(set(src_files).difference(dst_files), 1): src_path = self.src / relative_path dst_path =", "SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size) def _get_snapshot_hash_list(self, relative_paths): same = 0 lost", "old_snapshotfile == snapshotfile: same += 1 if increase_worth_reporting(same): logger.debug(\"#%d. same", "= globs self.relative_path_to_snapshotfile = {} self.hexdigest_to_snapshotfiles = {} self.parallel =", "we rather handle it here than leave # exceptions not", "contained in root object. Note that any call to public", "self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items()", "SnapshotState from astacus.common.progress import increase_worth_reporting, Progress from pathlib import Path", "old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None) if old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile", "files within). In that case, there is little point in", "+= 1 if increase_worth_reporting(lost): logger.debug(\"#%d. lost - %s disappeared before", "i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path = self.dst / relative_dir", "- disappeared): logger.debug(\"#%d. new file: %r\", i - disappeared, relative_path)", "from typing import Optional import base64 import hashlib import logging", "dst_dirs, dst_files = src_dirs, src_files else: progress.add_total(3) dst_dirs, dst_files =", "_list_dirs_and_files(self, basepath: Path): files = self._list_files(basepath) dirs = {p.parent for", "== snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path):", "continue if old_snapshotfile: snapshotfile.hexdigest = old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if", "snapshotfile def get_snapshot_hashes(self): assert self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for", "size=sf[0].file_size) for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf ] def", "logger.debug(\"#%d. new file: %r\", i - disappeared, relative_path) changes +=", "dst_files = self._list_dirs_and_files(self.dst) snapshotfiles = list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): #", "Then, create/update corresponding snapshotfile objects (old # ones were already", "The hash on disk MAY change, which may require subsequent", "# files within). 
In that case, there is little point", "may require subsequent incremential snapshot and-or ignoring the files which", "= {p.parent for p in files} return sorted(dirs), files def", "snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile: same += 1", "need to be built-in to Snapshotter, but having it there", "= old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile: same += 1 if", "def get_snapshot_hashes(self): assert self.lock.locked() return [ SnapshotHash(hexdigest=dig, size=sf[0].file_size) for dig,", "def get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *,", "logger.debug(\"#%d. new directory: %r\", i, relative_dir) changes += 1 return", "as list of other hashes which correspond to files referred", "is started twice at # same time. While it is", "of other hashes which correspond to files referred to within", "for path in basepath.glob(glob): if not path.is_file() or path.is_symlink(): continue", "_get_snapshot_hash_list(self, relative_paths): same = 0 lost = 0 for relative_path", "changes def _snapshot_add_missing_files(self, *, src_files, dst_files): existing = 0 disappeared", "just use the src # directory contents as-is. dst_dirs, dst_files", "increase_worth_reporting(disappeared): logger.debug(\"#%d. %s disappeared before linking, ignoring\", disappeared, src_path) continue", "them self.src = Path(src) self.dst = Path(dst) self.globs = globs", "root object. Note that any call to public API MUST", "logger.debug(\"#%d. %s already existed, ignoring\", existing, src_path) continue except FileNotFoundError:", "or configuration must supply them self.src = Path(src) self.dst =", "it there enables asserting its state during public API calls.", "relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path = self.src / relative_path dst_path", "def _snapshot_remove_extra_files(self, *, src_files, dst_files): changes = 0 for i,", "os.link(src=src_path, dst=dst_path, follow_symlinks=False) except FileExistsError: # This happens only if", "files def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None) if", "[]).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del", "snapshotfile: same += 1 if increase_worth_reporting(same): logger.debug(\"#%d. same - %r", "than leave # exceptions not handled. existing += 1 if", "already removed) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) snapshotfiles = list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles))", "exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. 
new directory: %r\", i, relative_dir) changes", "utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel) # We initially started with 1", "should be used if and only if it is #", "in enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst / relative_path snapshotfile =", "snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest:", "snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f) return snapshotfile def", "itself might not need to be built-in to Snapshotter, but", "1 return changes def _snapshot_add_missing_files(self, *, src_files, dst_files): existing =", "to ignored files it # actually might not even work.", "MAY change, which may require subsequent incremential snapshot and-or ignoring", "def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path]", "# exceptions not handled. existing += 1 if increase_worth_reporting(existing): logger.debug(\"#%d.", "= src_dirs, src_files else: progress.add_total(3) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) #", "snapshotfile def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return True changes", "existing += 1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed, ignoring\",", "Progress() src_dirs, src_files = self._list_dirs_and_files(self.src) progress.start(1) if self.src == self.dst:", "with snapshotfile.open_for_reading(self.dst) as f: if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 =", "self.src / relative_path st = src_path.stat() return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size)", "rather handle it here than leave # exceptions not handled.", "i, relative_path in enumerate(set(src_files).difference(dst_files), 1): src_path = self.src / relative_path", "objects (old # ones were already removed) dst_dirs, dst_files =", "from astacus.common.ipc import SnapshotFile, SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting,", "2020 Aiven Ltd See LICENSE for details \"\"\" from astacus.common", "snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile) def _remove_snapshotfile(self, snapshotfile: SnapshotFile): assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] ==", "self.dst / relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. new directory:", "shared across operations, possibly used from multiple threads, and the", "between snapshot and # upload steps (e.g. Astacus controls the", "return snapshotfile def _result_cb(*, map_in, map_out): self._add_snapshotfile(map_out) progress.add_success() return True", "old running' is intentional feature but new operation should eventually", "there enables asserting its state during public API calls. \"\"\"", "ignored files it # actually might not even work. 
#", "While it is technically speaking upstream # error, we rather", "Create missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success() # Remove", "= 0 for relative_path in relative_paths: old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) try:", "if sf ] def get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values()))", "built-in to Snapshotter, but having it there enables asserting its", "snapshot and-or ignoring the files which have changed. The output", "if and only if it is # known that files", "dst is present as it is in snapshot with snapshotfile.open_for_reading(self.dst)", "the # files within). In that case, there is little", "snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE: snapshotfile.content_b64 = base64.b64encode(f.read()).decode() else: snapshotfile.hexdigest = hash_hexdigest_readable(f)", "threading logger = logging.getLogger(__name__) _hash = hashlib.blake2s def hash_hexdigest_readable(f, *,", "operation can be started with old running' is intentional feature", "not exactly flawless (the 'new operation can be started with", "= self.dst / relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. new", "src_path = self.src / relative_path dst_path = self.dst / relative_path", "continue if increase_worth_reporting(i - disappeared): logger.debug(\"#%d. new file: %r\", i", "/ relative_dir dst_path.mkdir(parents=True, exist_ok=True) if increase_worth_reporting(i): logger.debug(\"#%d. new directory: %r\",", "relative_path) continue yield snapshotfile def get_snapshot_hashes(self): assert self.lock.locked() return [", "output to outside is just root object's hash, as well", "before linking, ignoring\", disappeared, src_path) continue if increase_worth_reporting(i - disappeared):", "get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self, *, src_dirs,", "steps (e.g. Astacus controls the lifecycle of the # files", "from astacus.common.progress import increase_worth_reporting, Progress from pathlib import Path from", "changed. The output to outside is just root object's hash,", "or path.is_symlink(): continue relpath = path.relative_to(basepath) for parent in relpath.parents:", "Progress from pathlib import Path from typing import Optional import", "dst, globs, parallel): assert globs # model has empty; either", "for parent in relpath.parents: if parent.name == magic.ASTACUS_TMPDIR: break else:", "increase_worth_reporting(i): logger.debug(\"#%d. extra file: %r\", i, relative_path) changes += 1", "that files will not disappear between snapshot and # upload", "time. 
While it is technically speaking upstream # error, we", "_snapshot_create_missing_directories(self, *, src_dirs, dst_dirs): changes = 0 for i, relative_dir", "which correspond to files referred to within the file list", "] def get_snapshot_state(self): assert self.lock.locked() return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values())) def _snapshot_create_missing_directories(self,", "has empty; either plugin or configuration must supply them self.src", "h.hexdigest() class Snapshotter: \"\"\"Snapshotter keeps track of files on disk,", "= Progress() src_dirs, src_files = self._list_dirs_and_files(self.src) progress.start(1) if self.src ==", "relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1): dst_path = self.dst / relative_dir dst_path.mkdir(parents=True,", "self._list_dirs_and_files(self.dst) # Create missing directories changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs) progress.add_success()", "and due to ignored files it # actually might not", "in root object. Note that any call to public API", "(e.g. Astacus controls the lifecycle of the # files within).", "state during public API calls. \"\"\" def __init__(self, *, src,", "def _add_snapshotfile(self, snapshotfile: SnapshotFile): old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None) if old_snapshotfile:", "used from multiple threads, and the single-operation-only mode of operation", "the src # directory contents as-is. dst_dirs, dst_files = src_dirs,", "progress.add_total(3) dst_dirs, dst_files = self._list_dirs_and_files(self.dst) # Create missing directories changes", "We COULD also remove extra directories, but it is not", "logger.debug(\"#%d. lost - %s disappeared before stat, ignoring\", lost, self.src", "threads, and the single-operation-only mode of operation is not exactly", "supply them self.src = Path(src) self.dst = Path(dst) self.globs =", "public API calls. \"\"\" def __init__(self, *, src, dst, globs,", "1): src_path = self.src / relative_path dst_path = self.dst /", "None) if old_snapshotfile: self._remove_snapshotfile(old_snapshotfile) self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest,", "increase_worth_reporting(same): logger.debug(\"#%d. same - %r in %s is same\", same,", "Astacus controls the lifecycle of the # files within). In", "1 if increase_worth_reporting(lost): logger.debug(\"#%d. 
lost - %s disappeared before stat,", "snapshotfiles = list(self._get_snapshot_hash_list(dst_files)) progress.add_total(len(snapshotfiles)) def _cb(snapshotfile): # src may or", "sf in self.hexdigest_to_snapshotfiles.items() if sf ] def get_snapshot_state(self): assert self.lock.locked()", "other hashes which correspond to files referred to within the", "src, dst, globs, parallel): assert globs # model has empty;", "break else: result_files.add(relpath) return sorted(result_files) def _list_dirs_and_files(self, basepath: Path): files", "assert self.lock.locked() if progress is None: progress = Progress() src_dirs,", "__init__(self, *, src, dst, globs, parallel): assert globs # model", "relative_path) changes += 1 return changes def _snapshot_add_missing_files(self, *, src_files,", "= threading.Lock() def _list_files(self, basepath: Path): result_files = set() for", "base64 import hashlib import logging import os import threading logger", "= 0 lost = 0 for relative_path in relative_paths: old_snapshotfile", "running' is intentional feature but new operation should eventually replace", "extra file: %r\", i, relative_path) changes += 1 return changes", "SnapshotHash, SnapshotState from astacus.common.progress import increase_worth_reporting, Progress from pathlib import", "referred to within the file list contained in root object.", "but having it there enables asserting its state during public", "disappear between snapshot and # upload steps (e.g. Astacus controls", "import Optional import base64 import hashlib import logging import os", "os import threading logger = logging.getLogger(__name__) _hash = hashlib.blake2s def", "1 if increase_worth_reporting(existing): logger.debug(\"#%d. %s already existed, ignoring\", existing, src_path)", "probably really worth it and due to ignored files it", "that case, there is little point in # making extra", "missing files changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files) progress.add_success() # We COULD", "h = _hash() while True: data = f.read(read_buffer) if not", "snapshotfile del self.relative_path_to_snapshotfile[snapshotfile.relative_path] if snapshotfile.hexdigest: self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile) def _snapshotfile_from_path(self, relative_path): src_path", "0 for i, relative_path in enumerate(set(dst_files).difference(src_files), 1): dst_path = self.dst", "/ relative_path snapshotfile = self.relative_path_to_snapshotfile.get(relative_path) if snapshotfile: self._remove_snapshotfile(snapshotfile) dst_path.unlink() if", "progress.add_success() # We COULD also remove extra directories, but it", "increase_worth_reporting(i): logger.debug(\"#%d. new directory: %r\", i, relative_dir) changes += 1", "= old_snapshotfile.hexdigest snapshotfile.content_b64 = old_snapshotfile.content_b64 if old_snapshotfile == snapshotfile: same", "file: %r\", i - disappeared, relative_path) changes += 1 return", "astacus.common.progress import increase_worth_reporting, Progress from pathlib import Path from typing", "{} self.parallel = parallel self.lock = threading.Lock() def _list_files(self, basepath:", "old). 
class Snapshotter:
    """Snapshotter keeps track of files on disk, and their hashes.

    The output to the outside is the root object's hash, as well as
    list of other hashes which correspond to files referred to within
    the file list contained in root object.

    Note that all publicly visible methods MUST be called with
    snapshotter.lock held. This is because Snapshotter is process-wide
    state shared across operations, possibly used from multiple
    threads, and the single-operation-only mode of operation is not
    exactly flawless (the 'new operation may start while the old one
    is still running' is intentional feature but new operation should
    eventually replace the old). The lock itself might not need to be
    built-in to Snapshotter, but having it there enables asserting its
    state during public API calls.
    """

    def __init__(self, *, src, dst, globs, parallel):
        assert globs  # model has empty; the caller must supply actual globs
        self.src = Path(src)
        self.dst = Path(dst)
        self.globs = globs
        self.relative_path_to_snapshotfile = {}
        self.hexdigest_to_snapshotfiles = {}
        self.parallel = parallel
        self.lock = threading.Lock()

    def _list_files(self, basepath: Path):
        result_files = set()
        for glob in self.globs:
            for path in basepath.glob(glob):
                if not path.is_file() or path.is_symlink():
                    continue
                relpath = path.relative_to(basepath)
                # Skip anything inside Astacus' own temporary directory
                for parent in relpath.parents:
                    if parent.name == magic.ASTACUS_TMPDIR:
                        break
                else:
                    result_files.add(relpath)
        return sorted(result_files)

    def _list_dirs_and_files(self, basepath: Path):
        files = self._list_files(basepath)
        dirs = {p.parent for p in files}
        return sorted(dirs), files

    def _add_snapshotfile(self, snapshotfile: SnapshotFile):
        old_snapshotfile = self.relative_path_to_snapshotfile.get(snapshotfile.relative_path, None)
        if old_snapshotfile:
            self._remove_snapshotfile(old_snapshotfile)
        self.relative_path_to_snapshotfile[snapshotfile.relative_path] = snapshotfile
        if snapshotfile.hexdigest:
            self.hexdigest_to_snapshotfiles.setdefault(snapshotfile.hexdigest, []).append(snapshotfile)

    def _remove_snapshotfile(self, snapshotfile: SnapshotFile):
        assert self.relative_path_to_snapshotfile[snapshotfile.relative_path] == snapshotfile
        del self.relative_path_to_snapshotfile[snapshotfile.relative_path]
        if snapshotfile.hexdigest:
            self.hexdigest_to_snapshotfiles[snapshotfile.hexdigest].remove(snapshotfile)

    def _snapshotfile_from_path(self, relative_path):
        src_path = self.src / relative_path
        st = src_path.stat()
        return SnapshotFile(relative_path=relative_path, mtime_ns=st.st_mtime_ns, file_size=st.st_size)

    def _get_snapshot_hash_list(self, relative_paths):
        same = 0
        lost = 0
        for relative_path in relative_paths:
            old_snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)
            try:
                snapshotfile = self._snapshotfile_from_path(relative_path)
            except FileNotFoundError:
                lost += 1
                if increase_worth_reporting(lost):
                    logger.debug("#%d. lost - %s disappeared before stat, ignoring", lost, self.src / relative_path)
                continue
            if old_snapshotfile:
                snapshotfile.hexdigest = old_snapshotfile.hexdigest
                snapshotfile.content_b64 = old_snapshotfile.content_b64
                if old_snapshotfile == snapshotfile:
                    same += 1
                    if increase_worth_reporting(same):
                        logger.debug("#%d. same - %r in %s is unchanged", same, old_snapshotfile, relative_path)
                    continue
            yield snapshotfile

    def get_snapshot_hashes(self):
        assert self.lock.locked()
        return [
            SnapshotHash(hexdigest=dig, size=sf[0].file_size)
            for dig, sf in self.hexdigest_to_snapshotfiles.items() if sf
        ]

    def get_snapshot_state(self):
        assert self.lock.locked()
        return SnapshotState(root_globs=self.globs, files=sorted(self.relative_path_to_snapshotfile.values()))

    def _snapshot_create_missing_directories(self, *, src_dirs, dst_dirs):
        changes = 0
        for i, relative_dir in enumerate(set(src_dirs).difference(dst_dirs), 1):
            dst_path = self.dst / relative_dir
            dst_path.mkdir(parents=True, exist_ok=True)
            if increase_worth_reporting(i):
                logger.debug("#%d. new directory: %r", i, relative_dir)
            changes += 1
        return changes

    def _snapshot_remove_extra_files(self, *, src_files, dst_files):
        changes = 0
        for i, relative_path in enumerate(set(dst_files).difference(src_files), 1):
            dst_path = self.dst / relative_path
            snapshotfile = self.relative_path_to_snapshotfile.get(relative_path)
            if snapshotfile:
                self._remove_snapshotfile(snapshotfile)
            dst_path.unlink()
            if increase_worth_reporting(i):
                logger.debug("#%d. extra file: %r", i, relative_path)
            changes += 1
        return changes

    def _snapshot_add_missing_files(self, *, src_files, dst_files):
        existing = 0
        disappeared = 0
        changes = 0
        for i, relative_path in enumerate(set(src_files).difference(dst_files), 1):
            src_path = self.src / relative_path
            dst_path = self.dst / relative_path
            try:
                os.link(src=src_path, dst=dst_path, follow_symlinks=False)
            except FileExistsError:
                # This happens only if snapshot is started twice at
                # nearly the same time. While it is technically
                # speaking upstream error, we rather handle it here
                # than leave exceptions not handled.
                existing += 1
                if increase_worth_reporting(existing):
                    logger.debug("#%d. %s already existed, ignoring", existing, src_path)
                continue
            except FileNotFoundError:
                disappeared += 1
                if increase_worth_reporting(disappeared):
                    logger.debug("#%d. %s disappeared before linking, ignoring", disappeared, src_path)
                continue
            if increase_worth_reporting(i - disappeared):
                logger.debug("#%d. new file: %r", i - disappeared, relative_path)
            changes += 1
        return changes

    def snapshot(self, *, progress: Optional[Progress] = None):
        assert self.lock.locked()
        if progress is None:
            progress = Progress()
        src_dirs, src_files = self._list_dirs_and_files(self.src)
        progress.start(1)
        changes = 0
        if self.src == self.dst:
            # The src=dst mode should be used if and only if it is
            # known that the files will not disappear between snapshot
            # and upload steps (e.g. Astacus controls the files'
            # lifecycle). In that case, there is little point in
            # making extra symlinks, and the src directory contents
            # can be snapshotted directly.
            dst_dirs, dst_files = src_dirs, src_files
        else:
            progress.add_total(3)
            dst_dirs, dst_files = self._list_dirs_and_files(self.dst)

            # Create missing directories
            changes = self._snapshot_create_missing_directories(src_dirs=src_dirs, dst_dirs=dst_dirs)
            progress.add_success()

            # Remove extra files
            changes += self._snapshot_remove_extra_files(src_files=src_files, dst_files=dst_files)
            progress.add_success()

            # Add missing files
            changes += self._snapshot_add_missing_files(src_files=src_files, dst_files=dst_files)
            progress.add_success()

            # We COULD also remove extra directories, but it is not
            # probably really worth it and due to ignored files it
            # would not be entirely trivial either.

            # Then, create/update the corresponding snapshotfile
            # objects (any old ones are removed)
            dst_dirs, dst_files = self._list_dirs_and_files(self.dst)

        snapshotfiles = list(self._get_snapshot_hash_list(dst_files))
        progress.add_total(len(snapshotfiles))

        def _cb(snapshotfile):
            # src may or may not be present; dst is present as it is
            # in snapshot
            with snapshotfile.open_for_reading(self.dst) as f:
                if snapshotfile.file_size <= magic.EMBEDDED_FILE_SIZE:
                    snapshotfile.content_b64 = base64.b64encode(f.read()).decode()
                else:
                    snapshotfile.hexdigest = hash_hexdigest_readable(f)
            return snapshotfile

        def _result_cb(*, map_in, map_out):
            self._add_snapshotfile(map_out)
            progress.add_success()
            return True

        changes += len(snapshotfiles)
        utils.parallel_map_to(iterable=snapshotfiles, fun=_cb, result_callback=_result_cb, n=self.parallel)
        return changes
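To make the calling convention concrete, here is a usage sketch (added for illustration, not part of the module): it assumes the astacus package is importable, and the paths and glob are made up. Note that snapshot() and both getters are invoked with snapshotter.lock held, as the class docstring requires.

# Usage sketch (illustrative, not original code). Assumes astacus is
# installed; src/dst paths and the glob are placeholders.
import tempfile

def demo_snapshot():
    src = Path(tempfile.mkdtemp())
    dst = Path(tempfile.mkdtemp())
    (src / "data.txt").write_text("hello")
    snapshotter = Snapshotter(src=src, dst=dst, globs=["**/*"], parallel=1)
    with snapshotter.lock:  # public API requires the lock to be held
        changes = snapshotter.snapshot()
        state = snapshotter.get_snapshot_state()
        hashes = snapshotter.get_snapshot_hashes()
    return changes, state, hashes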
[ "for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename = os.path.basename(jar) shutil.copy2(jar, os.path.join(dst_package_jar_dir,", "tree but not in the build tree bld_package_jar_dir = os.path.join(args.build_base,", "return rc.returncode if not skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async", "bld_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in", "[args.gradle_task] else: cmd += ['assemble'] # Gradle Arguments cmd +=", "the build process dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle Executable if", "import has_wrapper_executable logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\"", "if rc and rc.returncode: return rc.returncode if not skip_hook_creation: create_environment_scripts(", "from colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.task", "import run from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable", "out_dirnames = set() out_filenames = set() for dirname, dirnames, filenames", "classes to CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook(", "src_dirnames prune_filenames = dst_filenames - src_filenames for prune_filename in prune_filenames:", "= args.install_base # invoke build step return await run( self.context,", "= [] # add jars and classes to CLASSPATH with", "Gradle Executable if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is", "# remove anything on the destination tree but not in", "+= [args.gradle_task] else: cmd += ['assemble'] # Gradle Arguments cmd", "src_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in", "build tree bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir = os.path.join(", "= self.context.pkg # remove anything on the destination tree but", "self.context.args logger.info( \"Building Gradle package in '{args.path}'\".format_map(locals())) if additional_hooks is", "src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir)", "update=1) # Gradle Executable if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif", "self.context.pkg # remove anything on the destination tree but not", "os.path.join( args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames =", "env = await get_command_environment( 'build', args.build_base, self.context.dependencies) except RuntimeError as", "set() for dirname, dirnames, filenames in os.walk(start_path): for subdirname in", "https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name,", "rc and rc.returncode: return rc.returncode rc = await self._install(args, env)", "nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to Gradle projects. 
' 'Arguments", "build step return await run( self.context, cmd, cwd=args.build_base, env=env) async", "pkg, args, additional_hooks=additional_hooks) async def _build(self, args, env): self.progress('build') #", "is not None: cmd = [GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could", "__init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path):", "'e.g. --gradle-args \" --help\"') parser.add_argument( '--gradle-task', help='Run a specific task", "# add jars and classes to CLASSPATH with wildcards #", "with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name,", "colcon_core.task import run from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import", "out_filenames) def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--gradle-args',", "os.path.join('share', pkg.name, 'java', '*'), mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base),", "dst_package_jar_dir) prune_dirnames = dst_dirnames - bld_dirnames prune_filenames = dst_filenames -", "= dst_dirnames - bld_dirnames prune_filenames = dst_filenames - bld_filenames for", "in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname), start=start_path)) for filename in", "package in '{args.path}'\".format_map(locals())) if additional_hooks is None: additional_hooks = []", "prune_filenames = dst_filenames - src_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir,", "cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not None: cmd =", "the latter during the build process dir_util.copy_tree(args.path, args.build_base, update=1) #", "= os.path.join( args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames", "os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files from the source directory", "GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint):", "self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames - bld_dirnames prune_filenames = dst_filenames", "async def build( # noqa: D102 self, *, additional_hooks=None, skip_hook_creation=False", "distutils import dir_util import glob import os from pathlib import", "for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for", "exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree(", "for dirname, dirnames, filenames in os.walk(start_path): for subdirname in dirnames:", "parser): # noqa: D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass", "os.path.join('share', pkg.name, 'java'), mode='prepend') try: env = await get_command_environment( 'build',", "src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames", "and rc.returncode: return rc.returncode rc = await self._install(args, env) if", "the default task') async def build( # noqa: D102 self,", 
"'*'), mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share',", "- bld_dirnames prune_filenames = dst_filenames - bld_filenames for prune_filename in", "get_command_environment from colcon_core.task import run from colcon_core.task import TaskExtensionPoint from", "start=start_path)) for filename in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path))", "import Path import shutil from colcon_core.environment import create_environment_scripts from colcon_core.logging", "run from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from", "os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir = os.path.join( args.install_base, 'share', pkg.name, 'java')", "\" --help\"') parser.add_argument( '--gradle-task', help='Run a specific task instead of", "the source tree src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base,", "in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname):", "args.build_base, update=1) # Gradle Executable if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())]", "as e: logger.error(str(e)) return 1 rc = await self._build(args, env)", "cmd = [GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could not find 'gradle'", "= dst_dirnames - src_dirnames prune_filenames = dst_filenames - src_filenames for", "'src') src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree(", "directory to the build one to avoid # polluting the", "not None: cmd = [GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could not", "def add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--gradle-args', nargs='*',", "\"\"\"Build gradle packages.\"\"\" def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION,", "<NAME> # Licensed under the Apache License, Version 2.0 from", "_build_file_tree(self, start_path): out_dirnames = set() out_filenames = set() for dirname,", "but not in the source tree src_package_src_dir = os.path.join(args.path, 'src')", "'java', '*'), mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH',", "dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames - src_dirnames", "Gradle Task (by default 'assemble') if args.gradle_task: cmd += [args.gradle_task]", "['--stacktrace'] # Add install_base to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] =", "environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke build step", "2.0 from distutils import dir_util import glob import os from", "in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke build step return", "'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend') try: env", "Path import shutil from colcon_core.environment import create_environment_scripts from colcon_core.logging import", "env) if rc and rc.returncode: return rc.returncode rc = await", "shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files from the source directory to", 
"None: cmd = [GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could not find", "- src_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname", "bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir = os.path.join( args.install_base, 'share',", "in '{args.path}'\".format_map(locals())) if additional_hooks is None: additional_hooks = [] #", "matching other options must be prefixed by a space,\\n' 'e.g.", "os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames =", "dst_filenames - src_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for", "import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__) class", "from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle", "args = self.context.args logger.info( \"Building Gradle package in '{args.path}'\".format_map(locals())) if", "= os.path.join(args.build_base, 'src') src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames", "import get_command_environment from colcon_core.task import run from colcon_core.task import TaskExtensionPoint", "args, additional_hooks=additional_hooks) async def _build(self, args, env): self.progress('build') # remove", "from colcon_core.plugin_system import satisfies_version from colcon_core.shell import create_environment_hook from colcon_core.shell", "get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger", "rc = await self._install(args, env) if rc and rc.returncode: return", "- src_dirnames prune_filenames = dst_filenames - src_filenames for prune_filename in", "dst_dirnames - bld_dirnames prune_filenames = dst_filenames - bld_filenames for prune_filename", "pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames,", "'--gradle-task', help='Run a specific task instead of the default task')", "files from the source directory to the build one to", "additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java',", "= self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames - src_dirnames prune_filenames =", "self._install(args, env) if rc and rc.returncode: return rc.returncode if not", "source tree src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base, 'src')", "has_wrapper_executable logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def", "instead of the default task') async def build( # noqa:", "os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames, src_filenames = self._build_file_tree(", "colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.shell import", "RuntimeError( \"Could not find 'gradle' or 'wrapper' executable\") # Gradle", "if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar in 
glob.glob(os.path.join(bld_package_jar_dir, '*.jar')):", "and classes to CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks +=", "and rc.returncode: return rc.returncode if not skip_hook_creation: create_environment_scripts( pkg, args,", "skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async def _build(self, args, env):", "to the build one to avoid # polluting the latter", "Version 2.0 from distutils import dir_util import glob import os", "from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger =", "additional_hooks = [] # add jars and classes to CLASSPATH", "rc.returncode: return rc.returncode rc = await self._install(args, env) if rc", "colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import", "from colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build", "def _install(self, args, env): self.progress('install') pkg = self.context.pkg # remove", "Apache License, Version 2.0 from distutils import dir_util import glob", "tree bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir = os.path.join( args.install_base,", "if args.gradle_task: cmd += [args.gradle_task] else: cmd += ['assemble'] #", "def _build_file_tree(self, start_path): out_dirnames = set() out_filenames = set() for", "avoid # polluting the latter during the build process dir_util.copy_tree(args.path,", "has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not None: cmd", "be prefixed by a space,\\n' 'e.g. --gradle-args \" --help\"') parser.add_argument(", "+= create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend')", "Copyright 2018 <NAME> # Licensed under the Apache License, Version", "dirnames, filenames in os.walk(start_path): for subdirname in dirnames: out_dirnames.add( os.path.relpath(", "rc = await self._build(args, env) if rc and rc.returncode: return", "= self.context.args logger.info( \"Building Gradle package in '{args.path}'\".format_map(locals())) if additional_hooks", "--help\"') parser.add_argument( '--gradle-task', help='Run a specific task instead of the", "satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path): out_dirnames = set() out_filenames =", "args.gradle_task: cmd += [args.gradle_task] else: cmd += ['assemble'] # Gradle", "'libs') dst_package_jar_dir = os.path.join( args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True)", "projects. 
' 'Arguments matching other options must be prefixed by", "add_arguments(self, *, parser): # noqa: D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*',", "subdirname), start=start_path)) for filename in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename),", "prune_filenames = dst_filenames - bld_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir,", "['assemble'] # Gradle Arguments cmd += (args.gradle_args or []) cmd", "out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path)) return (out_dirnames, out_filenames) def add_arguments(self,", "args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return 1 rc", "'build', 'libs') dst_package_jar_dir = os.path.join( args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir,", "env): self.progress('install') pkg = self.context.pkg # remove anything on the", "else: raise RuntimeError( \"Could not find 'gradle' or 'wrapper' executable\")", "prune_dirnames = dst_dirnames - bld_dirnames prune_filenames = dst_filenames - bld_filenames", "to avoid # polluting the latter during the build process", "from distutils import dir_util import glob import os from pathlib", "return (out_dirnames, out_filenames) def add_arguments(self, *, parser): # noqa: D102", "= self.context.pkg args = self.context.args logger.info( \"Building Gradle package in", "GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def __init__(self): # noqa: D107 super().__init__()", "in the build tree bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir", "from colcon_core.shell import get_command_environment from colcon_core.task import run from colcon_core.task", "space,\\n' 'e.g. 
--gradle-args \" --help\"') parser.add_argument( '--gradle-task', help='Run a specific", "mode='prepend') try: env = await get_command_environment( 'build', args.build_base, self.context.dependencies) except", "# noqa: D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments", "pkg.name, 'java', '*'), mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name,", "cwd=args.build_base, env=env) async def _install(self, args, env): self.progress('install') pkg =", "out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname), start=start_path)) for filename in filenames: out_filenames.add(", "import create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.task import run", "# Gradle Task (by default 'assemble') if args.gradle_task: cmd +=", "return 1 rc = await self._build(args, env) if rc and", "Task (by default 'assemble') if args.gradle_task: cmd += [args.gradle_task] else:", "return await run( self.context, cmd, cwd=args.build_base, env=env) async def _install(self,", "import create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version", "import dir_util import glob import os from pathlib import Path", "jars and classes to CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks", "1 rc = await self._build(args, env) if rc and rc.returncode:", "self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames", "remove anything on the destination tree but not in the", "dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames - bld_dirnames prune_filenames", "try: env = await get_command_environment( 'build', args.build_base, self.context.dependencies) except RuntimeError", "the Apache License, Version 2.0 from distutils import dir_util import", "\"Could not find 'gradle' or 'wrapper' executable\") # Gradle Task", "): pkg = self.context.pkg args = self.context.args logger.info( \"Building Gradle", "filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path)) return (out_dirnames, out_filenames) def", "mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name,", "self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return 1 rc =", "if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not None:", "if rc and rc.returncode: return rc.returncode rc = await self._install(args,", "'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'), mode='prepend') additional_hooks", "cmd += ['assemble'] # Gradle Arguments cmd += (args.gradle_args or", "'^1.0') def _build_file_tree(self, start_path): out_dirnames = set() out_filenames = set()", "get_command_environment( 'build', args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return", "on the destination tree but not in the build tree", "build one to avoid # polluting the latter during the", "None: additional_hooks = [] # add jars and classes to", "one to avoid # polluting the latter during the build", "os.path.relpath( os.path.join(dirname, 
subdirname), start=start_path)) for filename in filenames: out_filenames.add( os.path.relpath(", "not find 'gradle' or 'wrapper' executable\") # Gradle Task (by", "D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path): out_dirnames = set()", "cmd += ['--stacktrace'] # Add install_base to environment in GRADLE_INSTALL_PREFIX", "= colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def __init__(self): #", "args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree(", "not in the source tree src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir", "for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in prune_dirnames:", "# Add install_base to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base", "= [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not None: cmd = [GRADLE_EXECUTABLE]", "if additional_hooks is None: additional_hooks = [] # add jars", "anything on the destination tree but not in the build", "pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend') try: env = await", "latter during the build process dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle", "not skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async def _build(self, args,", "Gradle package in '{args.path}'\".format_map(locals())) if additional_hooks is None: additional_hooks =", "env) if rc and rc.returncode: return rc.returncode if not skip_hook_creation:", "options must be prefixed by a space,\\n' 'e.g. 
--gradle-args \"", "from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle", "os from pathlib import Path import shutil from colcon_core.environment import", "from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.shell", "return rc.returncode rc = await self._install(args, env) if rc and", "dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames - src_dirnames prune_filenames", "os.path.join(args.build_base, 'src') src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames =", "self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames", "copy files from the source directory to the build one", "noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path): out_dirnames =", "from colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system", "pathlib import Path import shutil from colcon_core.environment import create_environment_scripts from", "(out_dirnames, out_filenames) def add_arguments(self, *, parser): # noqa: D102 parser.add_argument(", "except RuntimeError as e: logger.error(str(e)) return 1 rc = await", "src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames -", "env): self.progress('build') # remove anything on the destination tree but", "= [GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could not find 'gradle' or", "colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle", "elif GRADLE_EXECUTABLE is not None: cmd = [GRADLE_EXECUTABLE] else: raise", "# https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share',", "additional_hooks += create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'),", "dst_package_src_dir) prune_dirnames = dst_dirnames - src_dirnames prune_filenames = dst_filenames -", "'Arguments matching other options must be prefixed by a space,\\n'", "noqa: D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to", "\"Building Gradle package in '{args.path}'\".format_map(locals())) if additional_hooks is None: additional_hooks", "build process dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle Executable if has_wrapper_executable(args):", "pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'), mode='prepend') additional_hooks += create_environment_hook(", "'--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to Gradle projects. 
'", "but not in the build tree bld_package_jar_dir = os.path.join(args.build_base, 'build',", "dst_package_jar_dir = os.path.join( args.install_base, 'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames,", "= self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames =", "[str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not None: cmd = [GRADLE_EXECUTABLE] else:", "GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke build step return await", "# Gradle Arguments cmd += (args.gradle_args or []) cmd +=", "prefixed by a space,\\n' 'e.g. --gradle-args \" --help\"') parser.add_argument( '--gradle-task',", "_build(self, args, env): self.progress('build') # remove anything on the destination", "wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH',", "for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in prune_dirnames:", "'share', pkg.name, 'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir)", "satisfies_version from colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment from", "# copy files from the source directory to the build", "'assemble') if args.gradle_task: cmd += [args.gradle_task] else: cmd += ['assemble']", "from colcon_core.task import run from colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle", "# noqa: D102 self, *, additional_hooks=None, skip_hook_creation=False ): pkg =", "await self._build(args, env) if rc and rc.returncode: return rc.returncode rc", "create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async def _build(self, args, env): self.progress('build')", "executable\") # Gradle Task (by default 'assemble') if args.gradle_task: cmd", "prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in prune_dirnames: if", "os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename", "prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(", "+= (args.gradle_args or []) cmd += ['--stacktrace'] # Add install_base", "def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self,", "bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames -", "in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname):", "[GRADLE_EXECUTABLE] else: raise RuntimeError( \"Could not find 'gradle' or 'wrapper'", "+= ['assemble'] # Gradle Arguments cmd += (args.gradle_args or [])", "start_path): out_dirnames = set() out_filenames = set() for dirname, dirnames,", "else: cmd += ['assemble'] # Gradle Arguments cmd += (args.gradle_args", "the destination tree but not in the build tree bld_package_jar_dir", "dirnames: out_dirnames.add( os.path.relpath( 
os.path.join(dirname, subdirname), start=start_path)) for filename in filenames:", "for filename in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path)) return", "CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars', Path(args.install_base),", "prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) #", "in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files from", "def _build(self, args, env): self.progress('build') # remove anything on the", "the source directory to the build one to avoid #", "import satisfies_version from colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment", "import shutil from colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger", "'{args.path}'\".format_map(locals())) if additional_hooks is None: additional_hooks = [] # add", "logger.error(str(e)) return 1 rc = await self._build(args, env) if rc", "dst_dirnames - src_dirnames prune_filenames = dst_filenames - src_filenames for prune_filename", "# Copyright 2018 <NAME> # Licensed under the Apache License,", "pkg.name, 'java'), mode='prepend') try: env = await get_command_environment( 'build', args.build_base,", "metavar='*', type=str.lstrip, help='Pass arguments to Gradle projects. ' 'Arguments matching", "self._build_file_tree( dst_package_src_dir) prune_dirnames = dst_dirnames - src_dirnames prune_filenames = dst_filenames", "= set() for dirname, dirnames, filenames in os.walk(start_path): for subdirname", "prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files from the", "colcon_core.task import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import", "prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname))", "cmd += [args.gradle_task] else: cmd += ['assemble'] # Gradle Arguments", "D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to Gradle", "+= ['--stacktrace'] # Add install_base to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX']", "--gradle-args \" --help\"') parser.add_argument( '--gradle-task', help='Run a specific task instead", "step return await run( self.context, cmd, cwd=args.build_base, env=env) async def", "in os.walk(start_path): for subdirname in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname),", "import glob import os from pathlib import Path import shutil", "in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar in", "set() out_filenames = set() for dirname, dirnames, filenames in os.walk(start_path):", "'build', args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e)) return 1", "gradle packages.\"\"\" def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0')", "dst_filenames - bld_filenames for prune_filename in prune_filenames: 
os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for", "= os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames, src_filenames =", "_install(self, args, env): self.progress('install') pkg = self.context.pkg # remove anything", "polluting the latter during the build process dir_util.copy_tree(args.path, args.build_base, update=1)", "for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy", "logger = colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def __init__(self):", "noqa: D102 self, *, additional_hooks=None, skip_hook_creation=False ): pkg = self.context.pkg", "not in the build tree bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs')", "= self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames - bld_dirnames prune_filenames =", "args, env): self.progress('install') pkg = self.context.pkg # remove anything on", "(args.gradle_args or []) cmd += ['--stacktrace'] # Add install_base to", "import os from pathlib import Path import shutil from colcon_core.environment", "D102 self, *, additional_hooks=None, skip_hook_creation=False ): pkg = self.context.pkg args", "destination tree but not in the build tree bld_package_jar_dir =", "additional_hooks is None: additional_hooks = [] # add jars and", "2018 <NAME> # Licensed under the Apache License, Version 2.0", "dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle Executable if has_wrapper_executable(args): cmd =", "glob import os from pathlib import Path import shutil from", "other options must be prefixed by a space,\\n' 'e.g. --gradle-args", "Gradle projects. ' 'Arguments matching other options must be prefixed", "= await self._build(args, env) if rc and rc.returncode: return rc.returncode", "by a space,\\n' 'e.g. --gradle-args \" --help\"') parser.add_argument( '--gradle-task', help='Run", "or 'wrapper' executable\") # Gradle Task (by default 'assemble') if", "'java'), mode='prepend') try: env = await get_command_environment( 'build', args.build_base, self.context.dependencies)", "self.progress('build') # remove anything on the destination tree but not", "must be prefixed by a space,\\n' 'e.g. --gradle-args \" --help\"')", "src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames, src_filenames", "or []) cmd += ['--stacktrace'] # Add install_base to environment", "- bld_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname", "additional_hooks=additional_hooks) async def _build(self, args, env): self.progress('build') # remove anything", "invoke build step return await run( self.context, cmd, cwd=args.build_base, env=env)", "'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend') try: env = await get_command_environment(", "a space,\\n' 'e.g. 
--gradle-args \" --help\"') parser.add_argument( '--gradle-task', help='Run a", "prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir,", "super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path): out_dirnames = set() out_filenames", "out_filenames = set() for dirname, dirnames, filenames in os.walk(start_path): for", "Add install_base to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base #", "'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'), mode='prepend') additional_hooks += create_environment_hook( 'classpath_classes',", "the destination tree but not in the source tree src_package_src_dir", "filename), start=start_path)) return (out_dirnames, out_filenames) def add_arguments(self, *, parser): #", "cmd += (args.gradle_args or []) cmd += ['--stacktrace'] # Add", "from pathlib import Path import shutil from colcon_core.environment import create_environment_scripts", "of the default task') async def build( # noqa: D102", "async def _install(self, args, env): self.progress('install') pkg = self.context.pkg #", "from the source directory to the build one to avoid", "= set() out_filenames = set() for dirname, dirnames, filenames in", "a specific task instead of the default task') async def", "<filename>colcon_gradle/task/gradle/build.py<gh_stars>0 # Copyright 2018 <NAME> # Licensed under the Apache", "os.path.join(dirname, filename), start=start_path)) return (out_dirnames, out_filenames) def add_arguments(self, *, parser):", "self.context.pkg args = self.context.args logger.info( \"Building Gradle package in '{args.path}'\".format_map(locals()))", "# polluting the latter during the build process dir_util.copy_tree(args.path, args.build_base,", "colcon_logger.getChild(__name__) class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def __init__(self): # noqa:", "start=start_path)) return (out_dirnames, out_filenames) def add_arguments(self, *, parser): # noqa:", "os.path.join(dst_package_jar_dir, prune_dirname)) for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename = os.path.basename(jar)", "on the destination tree but not in the source tree", "for subdirname in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname), start=start_path)) for", "TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from", "[]) cmd += ['--stacktrace'] # Add install_base to environment in", "colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system import", "help='Pass arguments to Gradle projects. 
' 'Arguments matching other options", "tree src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames,", "parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip, help='Pass arguments to Gradle projects.", "(by default 'assemble') if args.gradle_task: cmd += [args.gradle_task] else: cmd", "= await get_command_environment( 'build', args.build_base, self.context.dependencies) except RuntimeError as e:", "await get_command_environment( 'build', args.build_base, self.context.dependencies) except RuntimeError as e: logger.error(str(e))", "# noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def _build_file_tree(self, start_path): out_dirnames", "os.path.join(dirname, subdirname), start=start_path)) for filename in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname,", "dirname, dirnames, filenames in os.walk(start_path): for subdirname in dirnames: out_dirnames.add(", "[] # add jars and classes to CLASSPATH with wildcards", "tree but not in the source tree src_package_src_dir = os.path.join(args.path,", "build( # noqa: D102 self, *, additional_hooks=None, skip_hook_creation=False ): pkg", "colcon_core.shell import create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.task import", "+= create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'),", "self.progress('install') pkg = self.context.pkg # remove anything on the destination", "shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename =", "colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable logger = colcon_logger.getChild(__name__)", "to CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762 additional_hooks += create_environment_hook( 'classpath_jars',", "dir_util import glob import os from pathlib import Path import", "= os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir = os.path.join( args.install_base, 'share', pkg.name,", "to Gradle projects. 
' 'Arguments matching other options must be", "create_environment_hook( 'classpath_jars', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'), mode='prepend')", "prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar", "rc and rc.returncode: return rc.returncode if not skip_hook_creation: create_environment_scripts( pkg,", "task') async def build( # noqa: D102 self, *, additional_hooks=None,", "def build( # noqa: D102 self, *, additional_hooks=None, skip_hook_creation=False ):", "env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke build step return await run(", "import colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.shell import create_environment_hook", "filenames in os.walk(start_path): for subdirname in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname,", "destination tree but not in the source tree src_package_src_dir =", "Gradle Arguments cmd += (args.gradle_args or []) cmd += ['--stacktrace']", "raise RuntimeError( \"Could not find 'gradle' or 'wrapper' executable\") #", "to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke build", "self.context, cmd, cwd=args.build_base, env=env) async def _install(self, args, env): self.progress('install')", "pkg = self.context.pkg # remove anything on the destination tree", "jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename = os.path.basename(jar) shutil.copy2(jar, os.path.join(dst_package_jar_dir, jar_filename))", "RuntimeError as e: logger.error(str(e)) return 1 rc = await self._build(args,", "'src') dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir)", "bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames", "colcon_core.plugin_system import satisfies_version from colcon_core.shell import create_environment_hook from colcon_core.shell import", "task instead of the default task') async def build( #", "await run( self.context, cmd, cwd=args.build_base, env=env) async def _install(self, args,", "subdirname in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname), start=start_path)) for filename", "if not skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async def _build(self,", "= dst_filenames - src_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename))", "shutil from colcon_core.environment import create_environment_scripts from colcon_core.logging import colcon_logger from", "rc.returncode: return rc.returncode if not skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks)", "env=env) async def _install(self, args, env): self.progress('install') pkg = self.context.pkg", "arguments to Gradle projects. 
' 'Arguments matching other options must", "bld_dirnames prune_filenames = dst_filenames - bld_filenames for prune_filename in prune_filenames:", "args.install_base # invoke build step return await run( self.context, cmd,", "prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir, prune_dirname)) for jar in glob.glob(os.path.join(bld_package_jar_dir,", "# Licensed under the Apache License, Version 2.0 from distutils", "specific task instead of the default task') async def build(", "create_environment_scripts from colcon_core.logging import colcon_logger from colcon_core.plugin_system import satisfies_version from", "Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java', '*'), mode='prepend') additional_hooks +=", "Arguments cmd += (args.gradle_args or []) cmd += ['--stacktrace'] #", "the build one to avoid # polluting the latter during", "under the Apache License, Version 2.0 from distutils import dir_util", "*, additional_hooks=None, skip_hook_creation=False ): pkg = self.context.pkg args = self.context.args", "filename in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path)) return (out_dirnames,", "# invoke build step return await run( self.context, cmd, cwd=args.build_base,", "GRADLE_EXECUTABLE is not None: cmd = [GRADLE_EXECUTABLE] else: raise RuntimeError(", "default 'assemble') if args.gradle_task: cmd += [args.gradle_task] else: cmd +=", "Licensed under the Apache License, Version 2.0 from distutils import", "self._build(args, env) if rc and rc.returncode: return rc.returncode rc =", "rc.returncode if not skip_hook_creation: create_environment_scripts( pkg, args, additional_hooks=additional_hooks) async def", "args, env): self.progress('build') # remove anything on the destination tree", "during the build process dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle Executable", "prune_dirname)) for jar in glob.glob(os.path.join(bld_package_jar_dir, '*.jar')): jar_filename = os.path.basename(jar) shutil.copy2(jar,", "License, Version 2.0 from distutils import dir_util import glob import", "cmd, cwd=args.build_base, env=env) async def _install(self, args, env): self.progress('install') pkg", "*, parser): # noqa: D102 parser.add_argument( '--gradle-args', nargs='*', metavar='*', type=str.lstrip,", "logger.info( \"Building Gradle package in '{args.path}'\".format_map(locals())) if additional_hooks is None:", "dst_package_src_dir = os.path.join(args.build_base, 'src') src_dirnames, src_filenames = self._build_file_tree( src_package_src_dir) dst_dirnames,", "'java') os.makedirs(dst_package_jar_dir, exist_ok=True) bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames", "= await self._install(args, env) if rc and rc.returncode: return rc.returncode", "= self._build_file_tree( src_package_src_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_src_dir) prune_dirnames =", "help='Run a specific task instead of the default task') async", "add jars and classes to CLASSPATH with wildcards # https://docs.oracle.com/javase/8/docs/technotes/tools/windows/classpath.html#A1100762", "source directory to the build one to avoid # polluting", "parser.add_argument( '--gradle-task', help='Run a specific task instead of the default", "process dir_util.copy_tree(args.path, args.build_base, update=1) # Gradle Executable if has_wrapper_executable(args): cmd", "anything on the destination tree but not in the 
source", "'wrapper' executable\") # Gradle Task (by default 'assemble') if args.gradle_task:", "if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files from the source", "skip_hook_creation=False ): pkg = self.context.pkg args = self.context.args logger.info( \"Building", "in the source tree src_package_src_dir = os.path.join(args.path, 'src') dst_package_src_dir =", "create_environment_hook( 'classpath_classes', Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend') try:", "colcon_logger from colcon_core.plugin_system import satisfies_version from colcon_core.shell import create_environment_hook from", "in filenames: out_filenames.add( os.path.relpath( os.path.join(dirname, filename), start=start_path)) return (out_dirnames, out_filenames)", "' 'Arguments matching other options must be prefixed by a", "prune_filename in prune_filenames: os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in prune_dirnames: if", "os.remove(os.path.join(dst_package_src_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname))", "prune_dirnames = dst_dirnames - src_dirnames prune_filenames = dst_filenames - src_filenames", "the build tree bld_package_jar_dir = os.path.join(args.build_base, 'build', 'libs') dst_package_jar_dir =", "self, *, additional_hooks=None, skip_hook_creation=False ): pkg = self.context.pkg args =", "packages.\"\"\" def __init__(self): # noqa: D107 super().__init__() satisfies_version(TaskExtensionPoint.EXTENSION_POINT_VERSION, '^1.0') def", "await self._install(args, env) if rc and rc.returncode: return rc.returncode if", "= dst_filenames - bld_filenames for prune_filename in prune_filenames: os.remove(os.path.join(dst_package_jar_dir, prune_filename))", "os.walk(start_path): for subdirname in dirnames: out_dirnames.add( os.path.relpath( os.path.join(dirname, subdirname), start=start_path))", "'gradle' or 'wrapper' executable\") # Gradle Task (by default 'assemble')", "Executable if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE is not", "import TaskExtensionPoint from colcon_gradle.task.gradle import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE", "colcon_core.shell import get_command_environment from colcon_core.task import run from colcon_core.task import", "class GradleBuildTask(TaskExtensionPoint): \"\"\"Build gradle packages.\"\"\" def __init__(self): # noqa: D107", "default task') async def build( # noqa: D102 self, *,", "prune_dirname)) # copy files from the source directory to the", "pkg = self.context.pkg args = self.context.args logger.info( \"Building Gradle package", "prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree(os.path.join(dst_package_src_dir, prune_dirname)) # copy files", "e: logger.error(str(e)) return 1 rc = await self._build(args, env) if", "dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir) prune_dirnames = dst_dirnames - bld_dirnames", "type=str.lstrip, help='Pass arguments to Gradle projects. 
' 'Arguments matching other", "find 'gradle' or 'wrapper' executable\") # Gradle Task (by default", "Path(args.install_base), pkg.name, 'CLASSPATH', os.path.join('share', pkg.name, 'java'), mode='prepend') try: env =", "os.remove(os.path.join(dst_package_jar_dir, prune_filename)) for prune_dirname in prune_dirnames: if os.path.exists(prune_dirname): shutil.rmtree( os.path.join(dst_package_jar_dir,", "is None: additional_hooks = [] # add jars and classes", "async def _build(self, args, env): self.progress('build') # remove anything on", "create_environment_hook from colcon_core.shell import get_command_environment from colcon_core.task import run from", "bld_dirnames, bld_filenames = self._build_file_tree( bld_package_jar_dir) dst_dirnames, dst_filenames = self._build_file_tree( dst_package_jar_dir)", "rc.returncode rc = await self._install(args, env) if rc and rc.returncode:", "os.path.relpath( os.path.join(dirname, filename), start=start_path)) return (out_dirnames, out_filenames) def add_arguments(self, *,", "install_base to environment in GRADLE_INSTALL_PREFIX env['GRADLE_INSTALL_PREFIX'] = args.install_base # invoke", "run( self.context, cmd, cwd=args.build_base, env=env) async def _install(self, args, env):", "# Gradle Executable if has_wrapper_executable(args): cmd = [str(get_wrapper_executable(args).absolute())] elif GRADLE_EXECUTABLE", "import get_wrapper_executable from colcon_gradle.task.gradle import GRADLE_EXECUTABLE from colcon_gradle.task.gradle import has_wrapper_executable", "additional_hooks=None, skip_hook_creation=False ): pkg = self.context.pkg args = self.context.args logger.info(" ]
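Both _build() and _install() apply the same prune-then-copy pattern to mirror one tree into another: diff the relative file trees, delete what exists only in the destination, then copy everything over. The standalone sketch below (added for illustration; the function names and directories are placeholders, not colcon API) shows that pattern in isolation.

# Standalone sketch of the prune-then-copy mirroring used above.
# Names are illustrative; this is not part of colcon_gradle.
import os
import shutil

def list_tree(start_path):
    dirnames, filenames = set(), set()
    for dirname, dirs, files in os.walk(start_path):
        for d in dirs:
            dirnames.add(os.path.relpath(os.path.join(dirname, d), start=start_path))
        for f in files:
            filenames.add(os.path.relpath(os.path.join(dirname, f), start=start_path))
    return dirnames, filenames

def mirror_tree(src_dir, dst_dir):
    os.makedirs(dst_dir, exist_ok=True)
    src_dirs, src_files = list_tree(src_dir)
    dst_dirs, dst_files = list_tree(dst_dir)
    # prune files and directories that exist only in the destination
    for rel in dst_files - src_files:
        os.remove(os.path.join(dst_dir, rel))
    for rel in dst_dirs - src_dirs:
        path = os.path.join(dst_dir, rel)
        if os.path.exists(path):  # parent may already have been removed
            shutil.rmtree(path)
    # then copy (and update) everything from the source
    shutil.copytree(src_dir, dst_dir, dirs_exist_ok=True)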